[FFmpeg-devel] [PATCH 2/3] avcodec/nvenc: add master display and light level sei for HDR10
Lance Wang
lance.lmwang at gmail.com
Tue May 28 00:49:11 EEST 2019
On Saturday, May 25, 2019, James Almer <jamrial at gmail.com> wrote:
> On 5/22/2019 3:59 AM, lance.lmwang at gmail.com wrote:
> > From: Limin Wang <lance.lmwang at gmail.com>
> >
> > The testing command for the HDR10 output with nvenc:
> > $ ./ffmpeg_g -y -i 4K.mp4 -c:v hevc_nvenc -g 7 -color_primaries bt2020 -colorspace bt2020_ncl -color_trc smpte2084 -sei hdr10 \
> > -master_display "G(13250,34500)B(7500,3000)R(34000,16000)WP(15635,16450)L(10000000,50)" -max_cll "0, 0" test.ts
> >
> > Please note that it is preferable to use the frame SEI side data rather than the master_display and max_cll parameter options.
> > ---
> > libavcodec/nvenc.c | 129 ++++++++++++++++++++++++++++++++++++++++
> > libavcodec/nvenc.h | 18 ++++++
> > libavcodec/nvenc_hevc.c | 11 ++++
> > 3 files changed, 158 insertions(+)
> >
> > diff --git a/libavcodec/nvenc.c b/libavcodec/nvenc.c
> > index 75dda6d689..3fd0eca4a5 100644
> > --- a/libavcodec/nvenc.c
> > +++ b/libavcodec/nvenc.c
> > @@ -22,6 +22,9 @@
> > #include "config.h"
> >
> > #include "nvenc.h"
> > +#include "cbs_h265.h"
>
> This doesn't seem right. The encoder isn't using this framework at all.
>
> You're apparently including this only to get the
> H265RawSEIMasteringDisplayColourVolume and
> H265RawSEIContentLightLevelInfo structs, which you don't really need in
> order to fill sei_data[i].payload.
>
OK, I'll remove the dependency on those two structures and update the patch.
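Something along these lines is what I have in mind, just to illustrate: the payload can be written with put_bits() straight into a small buffer, with no cbs_h265 structs involved. This is only a sketch against the NvencContext fields added in this patch (the helper name is made up; it assumes the NV_ENC_SEI_PAYLOAD entries and the put_bits.h / hevc_sei.h includes already used here):

/* Sketch only: write the mastering display colour volume SEI payload
 * (ITU-T H.265, D.2.28) directly from the NvencContext fields. */
static int nvenc_fill_mdcv_payload(NvencContext *ctx, NV_ENC_SEI_PAYLOAD *sei)
{
    PutBitContext pb;
    int i;

    sei->payloadSize = 24; /* 3*(16+16) + 16 + 16 + 32 + 32 bits */
    sei->payloadType = HEVC_SEI_TYPE_MASTERING_DISPLAY_INFO;
    sei->payload     = av_mallocz(sei->payloadSize);
    if (!sei->payload)
        return AVERROR(ENOMEM);

    init_put_bits(&pb, sei->payload, sei->payloadSize);
    for (i = 0; i < 3; i++) {
        put_bits(&pb, 16, ctx->display_primaries_x[i]);
        put_bits(&pb, 16, ctx->display_primaries_y[i]);
    }
    put_bits(&pb, 16, ctx->white_point_x);
    put_bits(&pb, 16, ctx->white_point_y);
    put_bits32(&pb, ctx->max_display_mastering_luminance);
    put_bits32(&pb, ctx->min_display_mastering_luminance);
    flush_put_bits(&pb);

    return 0;
}

The content light level payload would follow the same pattern with two 16-bit fields in a 4-byte buffer. Using put_bits32() for the two 32-bit luminance values also stays within the 31-bit limit of put_bits().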
> > +#include "hevc_sei.h"
> > +#include "put_bits.h"
> >
> > #include "libavutil/hwcontext_cuda.h"
> > #include "libavutil/hwcontext.h"
> > @@ -30,6 +33,7 @@
> > #include "libavutil/avassert.h"
> > #include "libavutil/mem.h"
> > #include "libavutil/pixdesc.h"
> > +#include "libavutil/mastering_display_metadata.h"
> > #include "internal.h"
> >
> > #define CHECK_CU(x) FF_CUDA_CHECK_DL(avctx, dl_fn->cuda_dl, x)
> > @@ -1491,6 +1495,46 @@ av_cold int ff_nvenc_encode_init(AVCodecContext *avctx)
> > ctx->data_pix_fmt = avctx->pix_fmt;
> > }
> >
> > + ctx->display_primaries_x[0] = 13250;
> > + ctx->display_primaries_y[0] = 34500;
> > + ctx->display_primaries_x[1] = 7500;
> > + ctx->display_primaries_y[1] = 3000;
> > + ctx->display_primaries_x[2] = 34000;
> > + ctx->display_primaries_y[2] = 16000;
> > + ctx->white_point_x = 15635;
> > + ctx->white_point_y = 16450;
> > + ctx->max_display_mastering_luminance = 10000000;
> > + ctx->min_display_mastering_luminance = 500;
> > + ctx->max_content_light_level = 0;
> > + ctx->max_pic_average_light_level = 0;
> > + if (ctx->master_display) {
> > + ret = sscanf(ctx->master_display, "G(%hu,%hu)B(%hu,%hu)R(%hu,%hu)WP(%hu,%hu)L(%u,%u)",
> > + &ctx->display_primaries_x[0], &ctx->display_primaries_y[0],
> > + &ctx->display_primaries_x[1], &ctx->display_primaries_y[1],
> > + &ctx->display_primaries_x[2], &ctx->display_primaries_y[2],
> > + &ctx->white_point_x, &ctx->white_point_y,
> > + &ctx->max_display_mastering_luminance, &ctx->min_display_mastering_luminance);
> > + if (ret != 10) {
> > + ret = sscanf(ctx->master_display, "G[%hu,%hu]B[%hu,%hu]R[%hu,%hu]WP[%hu,%hu]L[%u,%u]",
> > + &ctx->display_primaries_x[0], &ctx->display_primaries_y[0],
> > + &ctx->display_primaries_x[1], &ctx->display_primaries_y[1],
> > + &ctx->display_primaries_x[2], &ctx->display_primaries_y[2],
> > + &ctx->white_point_x, &ctx->white_point_y,
> > + &ctx->max_display_mastering_luminance, &ctx->min_display_mastering_luminance);
> > + }
> > +
> > + if (ret != 10) {
> > + av_log(avctx, AV_LOG_INFO, "Failed to parse master display(%s)\n", ctx->master_display);
> > + }
> > + }
> > +
> > + if (ctx->max_cll) {
> > + ret = sscanf(ctx->max_cll, "%hu,%hu", &ctx->max_content_light_level, &ctx->max_pic_average_light_level);
> > + if (ret != 2) {
> > + av_log(avctx, AV_LOG_INFO, "Failed to parse max cll(%s)\n", ctx->max_cll);
> > + }
> > + }
> > +
> > if ((ret = nvenc_load_libraries(avctx)) < 0)
> > return ret;
> >
> > @@ -2110,6 +2154,91 @@ int ff_nvenc_send_frame(AVCodecContext *avctx, const AVFrame *frame)
> > }
> > }
> >
> > + if (ctx->sei & SEI_MASTERING_DISPLAY) {
> > + AVFrameSideData *sd = av_frame_get_side_data(frame, AV_FRAME_DATA_MASTERING_DISPLAY_METADATA);
> > + H265RawSEIMasteringDisplayColourVolume smd;
> > +
> > + if (sd) {
> > + AVMasteringDisplayMetadata *mdm = (AVMasteringDisplayMetadata *)sd->data;
> > + // HEVC uses a g,b,r ordering, which we convert from a more natural r,g,b
> > + const int mapping[3] = {2, 0, 1};
> > + const int chroma_den = 50000;
> > + const int luma_den = 10000;
> > +
> > + if (mdm->has_primaries && mdm->has_luminance) {
> > +
> > + for (i = 0; i < 3; i++) {
> > + const int j = mapping[i];
> > + smd.display_primaries_x[i] = chroma_den * av_q2d(mdm->display_primaries[j][0]);
> > + smd.display_primaries_y[i] = chroma_den * av_q2d(mdm->display_primaries[j][1]);
> > + }
> > +
> > + smd.white_point_x = chroma_den * av_q2d(mdm->white_point[0]);
> > + smd.white_point_y = chroma_den * av_q2d(mdm->white_point[1]);
> > + smd.max_display_mastering_luminance = luma_den * av_q2d(mdm->max_luminance);
> > + smd.min_display_mastering_luminance = luma_den * av_q2d(mdm->min_luminance);
> > + }
> > + } else {
> > + for (i = 0; i < 3; i++) {
> > + smd.display_primaries_x[i] = ctx->display_primaries_x[i];
> > + smd.display_primaries_y[i] = ctx->display_primaries_y[i];
> > + }
> > + smd.white_point_x = ctx->white_point_x;
> > + smd.white_point_y = ctx->white_point_y;
> > + smd.max_display_mastering_luminance = ctx->max_display_mastering_luminance;
> > + smd.min_display_mastering_luminance = ctx->min_display_mastering_luminance;
> > + }
> > +
> > + sei_data[sei_count].payloadSize = sizeof(H265RawSEIMasteringDisplayColourVolume);
> > + sei_data[sei_count].payloadType = HEVC_SEI_TYPE_MASTERING_DISPLAY_INFO;
> > + sei_data[sei_count].payload = av_mallocz(sei_data[sei_count].payloadSize);
> > + if (sei_data[sei_count].payload) {
> > + PutBitContext pb;
> > +
> > + init_put_bits(&pb, sei_data[sei_count].payload, sei_data[sei_count].payloadSize);
> > + for (i = 0; i < 3; i++) {
> > + put_bits(&pb, 16, smd.display_primaries_x[i]);
> > + put_bits(&pb, 16, smd.display_primaries_y[i]);
> > + }
> > + put_bits(&pb, 16, smd.white_point_x);
> > + put_bits(&pb, 16, smd.white_point_y);
> > + put_bits(&pb, 32, smd.max_display_mastering_luminance);
> > + put_bits(&pb, 32, smd.min_display_mastering_luminance);
> > + flush_put_bits(&pb);
> > +
> > + sei_count ++;
> > + }
> > + }
> > +
> > + if (ctx->sei & SEI_CONTENT_LIGHT_LEVEL) {
> > + AVFrameSideData *sd = av_frame_get_side_data(frame, AV_FRAME_DATA_CONTENT_LIGHT_LEVEL);
> > + H265RawSEIContentLightLevelInfo clli;
> > +
> > + if (sd) {
> > + AVContentLightMetadata *clm = (AVContentLightMetadata *)sd->data;
> > +
> > + clli.max_content_light_level = FFMIN(clm->MaxCLL, 65535);
> > + clli.max_pic_average_light_level = FFMIN(clm->MaxFALL, 65535);
> > + } else {
> > + clli.max_content_light_level = ctx->max_content_light_level;
> > + clli.max_pic_average_light_level = ctx->max_pic_average_light_level;
> > + }
> > +
> > + sei_data[sei_count].payloadSize = sizeof(H265RawSEIContentLightLevelInfo);
> > + sei_data[sei_count].payloadType = HEVC_SEI_TYPE_CONTENT_LIGHT_LEVEL_INFO;
> > + sei_data[sei_count].payload = av_mallocz(sei_data[sei_count].payloadSize);
> > + if (sei_data[sei_count].payload) {
> > + PutBitContext pb;
> > +
> > + init_put_bits(&pb, sei_data[sei_count].payload, sei_data[sei_count].payloadSize);
> > + put_bits(&pb, 16, clli.max_content_light_level);
> > + put_bits(&pb, 16, clli.max_pic_average_light_level);
> > + flush_put_bits(&pb);
> > +
> > + sei_count ++;
> > + }
> > + }
> > +
> > nvenc_codec_specific_pic_params(avctx, &pic_params, sei_data, sei_count);
> > } else {
> > pic_params.encodePicFlags = NV_ENC_PIC_FLAG_EOS;
> > diff --git a/libavcodec/nvenc.h b/libavcodec/nvenc.h
> > index ddd6168409..583c48d090 100644
> > --- a/libavcodec/nvenc.h
> > +++ b/libavcodec/nvenc.h
> > @@ -54,6 +54,11 @@ typedef void ID3D11Device;
> > #define NVENC_HAVE_HEVC_BFRAME_REF_MODE
> > #endif
> >
> > +enum {
> > + SEI_MASTERING_DISPLAY = 0x08,
> > + SEI_CONTENT_LIGHT_LEVEL = 0x10,
> > +};
> > +
> > typedef struct NvencSurface
> > {
> > NV_ENC_INPUT_PTR input_surface;
> > @@ -192,6 +197,19 @@ typedef struct NvencContext
> > int coder;
> > int b_ref_mode;
> > int a53_cc;
> > + uint64_t sei;
> > +
> > + char *master_display;
> > + char *max_cll;
> > + uint16_t display_primaries_x[3];
> > + uint16_t display_primaries_y[3];
> > + uint16_t white_point_x;
> > + uint16_t white_point_y;
> > + uint32_t max_display_mastering_luminance;
> > + uint32_t min_display_mastering_luminance;
> > +
> > + uint16_t max_content_light_level;
> > + uint16_t max_pic_average_light_level;
> > } NvencContext;
> >
> > int ff_nvenc_encode_init(AVCodecContext *avctx);
> > diff --git a/libavcodec/nvenc_hevc.c b/libavcodec/nvenc_hevc.c
> > index d567d960ba..74ebd03d8e 100644
> > --- a/libavcodec/nvenc_hevc.c
> > +++ b/libavcodec/nvenc_hevc.c
> > @@ -127,6 +127,17 @@ static const AVOption options[] = {
> > { "each", "", 0,
> AV_OPT_TYPE_CONST, { .i64 = 1 }, 0, 0, VE, "b_ref_mode" },
> > { "middle", "", 0,
> AV_OPT_TYPE_CONST, { .i64 = 2 }, 0, 0, VE, "b_ref_mode" },
> > #endif
> > + { "sei", "Set SEI to include",
> > +
> OFFSET(sei), AV_OPT_TYPE_FLAGS,
> > + { .i64 =
> SEI_MASTERING_DISPLAY | SEI_CONTENT_LIGHT_LEVEL },
> > + 0, INT_MAX,
> VE, "sei" },
> > + { "hdr10","Include HDR metadata for mastering display colour volume
> and content light level information",
> > + 0,
> AV_OPT_TYPE_CONST, { .i64 = SEI_MASTERING_DISPLAY |
> SEI_CONTENT_LIGHT_LEVEL },
> > + 0, 0, VE,
> "sei" },
> > + { "master_display", "SMPTE ST 2086 master display color volume
> info SEI (HDR), the string format is: \"G(x,y)B(x,y)R(x,y)WP(x,y)L(
> max,min)\"",
> > +
> OFFSET(master_display), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, VE },
> > + { "max_cll", "content light level info, the string
> format is: \"cll, fall\"",
> > +
> OFFSET(max_cll), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, VE },
> > { NULL }
> > };
> >
> >
>
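Regarding the note above that frame SEI side data is preferred over the master_display/max_cll string options: for reference, this is roughly how an API user would attach that side data so ff_nvenc_send_frame() picks it up. It is only an illustration (the helper name is made up, and the values simply mirror the test command):

#include <libavutil/frame.h>
#include <libavutil/mastering_display_metadata.h>
#include <libavutil/rational.h>

/* Illustration only: attach HDR10 metadata as frame side data instead of
 * passing the master_display/max_cll option strings to the encoder. */
static int attach_hdr10_side_data(AVFrame *frame)
{
    AVMasteringDisplayMetadata *mdm = av_mastering_display_metadata_create_side_data(frame);
    AVContentLightMetadata     *clm = av_content_light_metadata_create_side_data(frame);

    if (!mdm || !clm)
        return AVERROR(ENOMEM);

    /* CIE xy chromaticities and luminance as rationals, stored in r,g,b order;
     * the encoder scales them by 50000 / 10000 and reorders to HEVC's g,b,r. */
    mdm->display_primaries[0][0] = av_make_q(34000, 50000); /* R x */
    mdm->display_primaries[0][1] = av_make_q(16000, 50000); /* R y */
    mdm->display_primaries[1][0] = av_make_q(13250, 50000); /* G x */
    mdm->display_primaries[1][1] = av_make_q(34500, 50000); /* G y */
    mdm->display_primaries[2][0] = av_make_q( 7500, 50000); /* B x */
    mdm->display_primaries[2][1] = av_make_q( 3000, 50000); /* B y */
    mdm->white_point[0] = av_make_q(15635, 50000);
    mdm->white_point[1] = av_make_q(16450, 50000);
    mdm->max_luminance  = av_make_q(10000000, 10000); /* 1000 cd/m^2  */
    mdm->min_luminance  = av_make_q(50, 10000);       /* 0.005 cd/m^2 */
    mdm->has_primaries  = 1;
    mdm->has_luminance  = 1;

    /* Example levels; the test command above simply used "0, 0". */
    clm->MaxCLL  = 1000;
    clm->MaxFALL = 400;

    return 0;
}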