[FFmpeg-devel] [PATCH] avcodec/dirac: Add support for decoding interlaced HQ profile
Kieran Kunhya
kierank at obe.tv
Tue Feb 2 16:55:11 CET 2016
From b919c9fa3d4778872bc9576705b24cd0c8193f4c Mon Sep 17 00:00:00 2001
From: Kieran Kunhya <kierank at obe.tv>
Date: Tue, 2 Feb 2016 15:52:54 +0000
Subject: [PATCH] avcodec/dirac: Add support for decoding interlaced HQ profile
---
libavcodec/dirac.c | 15 ++-----
libavcodec/dirac.h | 1 +
libavcodec/diracdec.c | 118 +++++++++++++++++++++++++++++++++++++-------------
3 files changed, 93 insertions(+), 41 deletions(-)
diff --git a/libavcodec/dirac.c b/libavcodec/dirac.c
index 39df2a8..d19adcf 100644
--- a/libavcodec/dirac.c
+++ b/libavcodec/dirac.c
@@ -324,7 +324,7 @@ int av_dirac_parse_sequence_header(AVDiracSeqHeader **pdsh,
{
AVDiracSeqHeader *dsh;
GetBitContext gb;
- unsigned video_format, picture_coding_mode;
+ unsigned video_format;
int ret;
dsh = av_mallocz(sizeof(*dsh));
@@ -373,17 +373,8 @@ int av_dirac_parse_sequence_header(AVDiracSeqHeader **pdsh,
if (ret < 0)
goto fail;
- /* [DIRAC_STD] picture_coding_mode shall be 0 for fields and 1 for frames
- * currently only used to signal field coding */
- picture_coding_mode = svq3_get_ue_golomb(&gb);
- if (picture_coding_mode != 0) {
- if (log_ctx) {
- av_log(log_ctx, AV_LOG_ERROR, "Unsupported picture coding mode %d",
- picture_coding_mode);
- }
- ret = AVERROR_INVALIDDATA;
- goto fail;
- }
+ /* [DIRAC_STD] picture_coding_mode shall be 1 for fields and 0 for frames */
+ dsh->field_coding = svq3_get_ue_golomb(&gb);
*pdsh = dsh;
return 0;
diff --git a/libavcodec/dirac.h b/libavcodec/dirac.h
index cb80fdc..447fafc 100644
--- a/libavcodec/dirac.h
+++ b/libavcodec/dirac.h
@@ -74,6 +74,7 @@ typedef struct AVDiracSeqHeader {
uint8_t interlaced;
uint8_t top_field_first;
+ uint8_t field_coding;
uint8_t frame_rate_index; ///< index into dirac_frame_rate[]
uint8_t aspect_ratio_index; ///< index into dirac_aspect_ratio[]
diff --git a/libavcodec/diracdec.c b/libavcodec/diracdec.c
index ca44e7b..405cc5e 100644
--- a/libavcodec/diracdec.c
+++ b/libavcodec/diracdec.c
@@ -161,6 +161,8 @@ typedef struct DiracContext {
int dc_prediction; /* has dc prediction */
int globalmc_flag; /* use global motion compensation */
int num_refs; /* number of reference pictures */
+ int field_coding; /* fields instead of frames */
+ int cur_field; /* 0 -> progressive/top, 1 -> bottom */
/* wavelet decoding */
unsigned wavelet_depth; /* depth of the IDWT */
@@ -227,6 +229,9 @@ typedef struct DiracContext {
dirac_weight_func weight_func;
dirac_biweight_func biweight_func;
+ DiracFrame dummy_picture;
+
+ DiracFrame prev_field;
DiracFrame *current_picture;
DiracFrame *ref_pics[2];
@@ -456,9 +461,11 @@ static av_cold int dirac_decode_init(AVCodecContext *avctx)
ff_mpegvideoencdsp_init(&s->mpvencdsp, avctx);
ff_videodsp_init(&s->vdsp, 8);
+ s->dummy_picture.avframe = av_frame_alloc();
for (i = 0; i < MAX_FRAMES; i++) {
s->all_frames[i].avframe = av_frame_alloc();
if (!s->all_frames[i].avframe) {
+ av_frame_free(&s->dummy_picture.avframe);
while (i > 0)
av_frame_free(&s->all_frames[--i].avframe);
return AVERROR(ENOMEM);
@@ -482,6 +489,7 @@ static av_cold int dirac_decode_end(AVCodecContext *avctx)
int i;
dirac_decode_flush(avctx);
+ av_frame_free(&s->dummy_picture.avframe);
for (i = 0; i < MAX_FRAMES; i++)
av_frame_free(&s->all_frames[i].avframe);
@@ -1822,6 +1830,7 @@ static int dirac_decode_frame_internal(DiracContext *s)
for (comp = 0; comp < 3; comp++) {
Plane *p = &s->plane[comp];
uint8_t *frame = s->current_picture->avframe->data[comp];
+ frame += s->cur_field*p->stride;
/* FIXME: small resolutions */
for (i = 0; i < 4; i++)
@@ -1838,11 +1847,12 @@ static int dirac_decode_frame_internal(DiracContext *s)
return ret;
if (!s->num_refs) { /* intra */
+ const int idx = (s->bit_depth - 8) >> 1;
+ const int ostride = p->stride << s->field_coding;
for (y = 0; y < p->height; y += 16) {
- int idx = (s->bit_depth - 8) >> 1;
ff_spatial_idwt_slice2(&d, y+16); /* decode */
- s->diracdsp.put_signed_rect_clamped[idx](frame + y*p->stride,
- p->stride,
+ s->diracdsp.put_signed_rect_clamped[idx](frame + y*ostride,
+ ostride,
p->idwt_buf
+ y*p->idwt_stride,
p->idwt_stride, p->width, 16);
}
@@ -1929,10 +1939,10 @@ static int dirac_decode_picture_header(DiracContext *s)
GetBitContext *gb = &s->gb;
/* [DIRAC_STD] 11.1.1 Picture Header. picture_header() PICTURE_NUM */
- picnum = s->current_picture->avframe->display_picture_number = get_bits_long(gb, 32);
-
-
- av_log(s->avctx,AV_LOG_DEBUG,"PICTURE_NUM: %d\n",picnum);
+ if (!s->field_coding) {
+ picnum = s->current_picture->avframe->display_picture_number = get_bits_long(gb, 32);
+ av_log(s->avctx,AV_LOG_DEBUG,"PICTURE_NUM: %d\n",picnum);
+ }
/* if this is the first keyframe after a sequence header, start our
reordering from here */
@@ -2070,6 +2080,10 @@ static int dirac_decode_data_unit(AVCodecContext *avctx, const uint8_t *buf, int
}
ret = ff_set_dimensions(avctx, dsh->width, dsh->height);
+
+ if (dsh->field_coding)
+ dsh->height >>= 1;
+
if (ret < 0) {
av_freep(&dsh);
return ret;
@@ -2084,6 +2098,8 @@ static int dirac_decode_data_unit(AVCodecContext *avctx, const uint8_t *buf, int
avctx->profile = dsh->profile;
avctx->level = dsh->level;
avctx->framerate = dsh->framerate;
+ avctx->field_order = dsh->top_field_first ? AV_FIELD_TT : avctx->field_order;
+ s->field_coding = dsh->field_coding;
s->bit_depth = dsh->bit_depth;
s->version.major = dsh->version.major;
s->version.minor = dsh->version.minor;
@@ -2117,17 +2133,22 @@ static int dirac_decode_data_unit(AVCodecContext *avctx, const uint8_t *buf, int
return AVERROR_INVALIDDATA;
}
- /* find an unused frame */
- for (i = 0; i < MAX_FRAMES; i++)
- if (s->all_frames[i].avframe->data[0] == NULL)
- pic = &s->all_frames[i];
- if (!pic) {
- av_log(avctx, AV_LOG_ERROR, "framelist full\n");
- return AVERROR_INVALIDDATA;
+ if (!s->field_coding) {
+ /* find an unused frame */
+ for (i = 0; i < MAX_FRAMES; i++) {
+ if (s->all_frames[i].avframe->data[0] == NULL)
+ pic = &s->all_frames[i];
+ if (!pic) {
+ av_log(avctx, AV_LOG_ERROR, "framelist full\n");
+ return AVERROR_INVALIDDATA;
+ }
+ }
+ av_frame_unref(pic->avframe);
+ } else {
+ /* Setup a dummy picture in interlaced mode */
+ pic = &s->dummy_picture;
}
- av_frame_unref(pic->avframe);
-
/* [DIRAC_STD] Defined in 9.6.1 ... */
tmp = parse_code & 0x03; /* [DIRAC_STD] num_refs() */
if (tmp > 2) {
@@ -2154,20 +2175,54 @@ static int dirac_decode_data_unit(AVCodecContext *avctx, const uint8_t *buf, int
return AVERROR_INVALIDDATA;
}
- if ((ret = get_buffer_with_edge(avctx, pic->avframe, (parse_code & 0x0C) == 0x0C ? AV_GET_BUFFER_FLAG_REF : 0)) < 0)
- return ret;
- s->current_picture = pic;
- s->plane[0].stride = pic->avframe->linesize[0];
- s->plane[1].stride = pic->avframe->linesize[1];
- s->plane[2].stride = pic->avframe->linesize[2];
+ if (s->field_coding && !s->hq_picture) {
+ av_log(avctx, AV_LOG_ERROR, "Interlaced input supported only under the HQ profile!\n");
+ return AVERROR_PATCHWELCOME;
+ }
- if (alloc_buffers(s, FFMAX3(FFABS(s->plane[0].stride), FFABS(s->plane[1].stride), FFABS(s->plane[2].stride))) < 0)
- return AVERROR(ENOMEM);
+ if (!s->field_coding) {
+ if ((ret = get_buffer_with_edge(avctx, pic->avframe, (parse_code & 0x0C) == 0x0C ? AV_GET_BUFFER_FLAG_REF : 0)) < 0)
+ return ret;
+ s->current_picture = pic;
+ s->plane[0].stride = pic->avframe->linesize[0];
+ s->plane[1].stride = pic->avframe->linesize[1];
+ s->plane[2].stride = pic->avframe->linesize[2];
- /* [DIRAC_STD] 11.1 Picture parse. picture_parse() */
- ret = dirac_decode_picture_header(s);
- if (ret < 0)
- return ret;
+ if (alloc_buffers(s, FFMAX3(FFABS(s->plane[0].stride), FFABS(s->plane[1].stride), FFABS(s->plane[2].stride))) < 0)
+ return AVERROR(ENOMEM);
+
+ /* [DIRAC_STD] 11.1 Picture parse. picture_parse() */
+ ret = dirac_decode_picture_header(s);
+ if (ret < 0)
+ return ret;
+ } else {
+ /* [DIRAC_STD] 11.1.1 Picture Header. picture_header() PICTURE_NUM */
+ uint32_t pict_num = get_bits_long(&s->gb, 32);
+ av_log(s->avctx,AV_LOG_DEBUG,"PICTURE_NUM: %d\n",pict_num);
+
+ s->cur_field = pict_num & 1;
+
+ if (!s->cur_field) {
+ av_frame_unref(s->prev_field.avframe);
+ if ((ret = get_buffer_with_edge(avctx, pic->avframe, AV_GET_BUFFER_FLAG_REF)) < 0)
+ return ret;
+ s->prev_field.avframe = pic->avframe;
+ s->plane[0].stride = pic->avframe->linesize[0];
+ s->plane[1].stride = pic->avframe->linesize[1];
+ s->plane[2].stride = pic->avframe->linesize[2];
+ s->current_picture = &s->prev_field;
+ } else {
+ s->plane[0].stride = s->current_picture->avframe->linesize[0];
+ s->plane[1].stride = s->current_picture->avframe->linesize[1];
+ s->plane[2].stride = s->current_picture->avframe->linesize[2];
+ s->current_picture->avframe = s->prev_field.avframe;
+ s->current_picture->avframe->display_picture_number = s->frame_number;
+ }
+
+ ret = dirac_decode_picture_header(s);
+ if (ret < 0)
+ return ret;
+ }
/* [DIRAC_STD] 13.0 Transform data syntax. transform_data() */
ret = dirac_decode_frame_internal(s);
@@ -2194,7 +2249,6 @@ static int dirac_decode_frame(AVCodecContext *avctx, void *data, int *got_frame,
memset(s->all_frames[i].interpolated, 0,
sizeof(s->all_frames[i].interpolated));
}
- s->current_picture = NULL;
*got_frame = 0;
/* end of stream, so flush delayed pics */
@@ -2267,6 +2321,12 @@ static int dirac_decode_frame(AVCodecContext *avctx, void *data, int *got_frame,
*got_frame = 1;
}
+ /* No output for the top field, wait for the second */
+ if (s->field_coding) {
+ if (!s->cur_field)
+ *got_frame = 0;
+ }
+
if (*got_frame)
s->frame_number = picture->display_picture_number + 1;
--
1.9.1
More information about the ffmpeg-devel
mailing list