From: Fei Wang <fei.w.w...@intel.com>

This adds support for HEVC alpha encode as introduced by Apple:
https://developer.apple.com/videos/play/wwdc2019/506/

Currently, it only supports RGBA video memory as input. The RGB and
alpha channels will be encoded into different layers in 4:2:0 color
format. The texture is set to shared to allow the alpha channel to be
extracted internally:
https://github.com/intel/libvpl/blob/5f6bd8a1e753c8f63a3fd8b36894d6968b808a6d/doc/spec/source/snippets/prg_encoding.c#L752

Example cmdline:
ffmpeg.exe -v verbose -hwaccel qsv -hwaccel_output_format qsv -f rawvideo \
-pix_fmt bgra -s:v 1920x1080 -r:v 25 -i input.argb -vf \
'format=bgra,hwupload=extra_hw_frames=120' -an -c:v hevc_qsv \
-alpha_encode 1 -y out.mp4

Signed-off-by: Fei Wang <fei.w.w...@intel.com>
---
 doc/encoders.texi         |  4 ++
 libavcodec/qsvenc.c       | 79 +++++++++++++++++++++++++++++++++++++--
 libavcodec/qsvenc.h       |  9 ++++-
 libavcodec/qsvenc_hevc.c  |  3 ++
 libavutil/hwcontext_qsv.c |  4 +-
 5 files changed, 93 insertions(+), 6 deletions(-)
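
For reference, a minimal, untested sketch (not part of this patch) of
enabling the new option through the libavcodec API rather than the ffmpeg
CLI. The helper name open_alpha_hevc_qsv is hypothetical; the "alpha_encode"
private option and the hevc_qsv encoder are the ones added/used by this
patch. Creating the QSV device/frames context with BGRA sw_format and
attaching it to hw_frames_ctx is left to the caller:

#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>

/* Hypothetical helper: open an hevc_qsv encoder with alpha-layer encoding
 * enabled.  Error handling and the QSV hw_frames_ctx setup (RGBA video
 * memory) are omitted and must be done by the caller. */
static AVCodecContext *open_alpha_hevc_qsv(int width, int height)
{
    const AVCodec *codec = avcodec_find_encoder_by_name("hevc_qsv");
    AVCodecContext *ctx;

    if (!codec)
        return NULL;

    ctx = avcodec_alloc_context3(codec);
    if (!ctx)
        return NULL;

    ctx->width     = width;
    ctx->height    = height;
    ctx->time_base = (AVRational){ 1, 25 };
    ctx->pix_fmt   = AV_PIX_FMT_QSV; /* frames come from a QSV hw frames context */

    /* Private option added by this patch; only effective on Windows with
     * D3D11 and RGBA (BGRA) video memory as input. */
    av_opt_set_int(ctx->priv_data, "alpha_encode", 1, 0);

    /* ctx->hw_frames_ctx must reference a QSV frames context whose
     * sw_format is AV_PIX_FMT_BGRA before the encoder is opened. */
    if (avcodec_open2(ctx, codec, NULL) < 0) {
        avcodec_free_context(&ctx);
        return NULL;
    }

    return ctx;
}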
diff --git a/doc/encoders.texi b/doc/encoders.texi
index 0749417db4..6094434ed4 100644
--- a/doc/encoders.texi
+++ b/doc/encoders.texi
@@ -4036,6 +4036,10 @@ skip_frame metadata indicates the number of missed frames before the current
 frame.
 @end table
 
+@item @var{alpha_encode}
+Encode the alpha and RGB channels into different layers, as introduced by Apple:
+https://developer.apple.com/videos/play/wwdc2019/506/. Only supported on
+Windows with RGBA video memory as input.
 @end table
 
 @subsection MPEG2 Options
diff --git a/libavcodec/qsvenc.c b/libavcodec/qsvenc.c
index 8200a14012..9af9932035 100644
--- a/libavcodec/qsvenc.c
+++ b/libavcodec/qsvenc.c
@@ -203,6 +203,9 @@ static void dump_video_param(AVCodecContext *avctx, QSVEncContext *q,
 #if QSV_HAVE_HE
     mfxExtHyperModeParam *exthypermodeparam = NULL;
 #endif
+#if QSV_HAVE_AC
+    mfxExtAlphaChannelEncCtrl *extalphachannel = NULL;
+#endif
 
     const char *tmp_str = NULL;
 
@@ -220,6 +223,11 @@ static void dump_video_param(AVCodecContext *avctx, QSVEncContext *q,
         exthypermodeparam = (mfxExtHyperModeParam *)coding_opts[q->exthypermodeparam_idx];
 #endif
 
+#if QSV_HAVE_AC
+    if (q->extaplhachannel_idx > 0)
+        extalphachannel = (mfxExtAlphaChannelEncCtrl *)coding_opts[q->extaplhachannel_idx];
+#endif
+
     av_log(avctx, AV_LOG_VERBOSE, "profile: %s; level: %"PRIu16"\n",
            print_profile(avctx->codec_id, info->CodecProfile), info->CodecLevel);
 
@@ -400,6 +408,23 @@ static void dump_video_param(AVCodecContext *avctx, QSVEncContext *q,
         av_log(avctx, AV_LOG_VERBOSE, "\n");
     }
 #endif
+
+#if QSV_HAVE_AC
+    if (extalphachannel) {
+        av_log(avctx, AV_LOG_VERBOSE, "AlphaChannel Encode: %s; ", print_threestate(extalphachannel->EnableAlphaChannelEncoding));
+
+        av_log(avctx, AV_LOG_VERBOSE, "Mode: ");
+        if (extalphachannel->AlphaChannelMode == MFX_ALPHA_MODE_PREMULTIPLIED)
+            av_log(avctx, AV_LOG_VERBOSE, "PREMULTIPLIED; ");
+        else if (extalphachannel->AlphaChannelMode == MFX_ALPHA_MODE_STRAIGHT)
+            av_log(avctx, AV_LOG_VERBOSE, "STRAIGHT; ");
+        else
+            av_log(avctx, AV_LOG_VERBOSE, "unknown; ");
+        av_log(avctx, AV_LOG_VERBOSE, "BitrateRatio: %d", extalphachannel->AlphaChannelBitrateRatio);
+
+        av_log(avctx, AV_LOG_VERBOSE, "\n");
+    }
+#endif
 }
 
 static void dump_video_vp9_param(AVCodecContext *avctx, QSVEncContext *q,
@@ -1150,7 +1175,8 @@ static int init_video_param(AVCodecContext *avctx, QSVEncContext *q)
             q->extco3.MaxFrameSizeP = q->max_frame_size_p;
         if (sw_format == AV_PIX_FMT_BGRA &&
             (q->profile == MFX_PROFILE_HEVC_REXT ||
-             q->profile == MFX_PROFILE_UNKNOWN))
+             q->profile == MFX_PROFILE_UNKNOWN) &&
+            !q->alpha_encode)
             q->extco3.TargetChromaFormatPlus1 = MFX_CHROMAFORMAT_YUV444 + 1;
 
         q->extco3.ScenarioInfo = q->scenario;
@@ -1282,6 +1308,37 @@ static int init_video_param(AVCodecContext *avctx, QSVEncContext *q)
     }
 #endif
 
+#if QSV_HAVE_AC
+    if (q->alpha_encode) {
+        if (QSV_RUNTIME_VERSION_ATLEAST(q->ver, 2, 13)) {
+            mfxIMPL impl;
+            MFXQueryIMPL(q->session, &impl);
+
+            if (MFX_IMPL_VIA_MASK(impl) != MFX_IMPL_VIA_D3D11) {
+                av_log(avctx, AV_LOG_ERROR, "Alpha Channel Encode requires D3D11VA\n");
+                return AVERROR_UNKNOWN;
+            }
+
+            if (q->param.mfx.CodecId != MFX_CODEC_HEVC) {
+                av_log(avctx, AV_LOG_ERROR, "Alpha Channel Encode is not supported by this encoder. "
+                       "Supported: hevc_qsv\n");
+                return AVERROR_UNKNOWN;
+            }
+
+            q->extaplhachannelparam.Header.BufferId = MFX_EXTBUFF_ALPHA_CHANNEL_ENC_CTRL;
+            q->extaplhachannelparam.Header.BufferSz = sizeof(q->extaplhachannelparam);
+            q->extaplhachannelparam.EnableAlphaChannelEncoding = MFX_CODINGOPTION_ON;
+            q->extaplhachannelparam.AlphaChannelBitrateRatio = 25;
+            q->extaplhachannelparam.AlphaChannelMode = MFX_ALPHA_MODE_PREMULTIPLIED;
+            q->extparam_internal[q->nb_extparam_internal++] = (mfxExtBuffer *)&q->extaplhachannelparam;
+        } else {
+            av_log(avctx, AV_LOG_ERROR,
+                   "This version of the runtime doesn't support Alpha Channel Encode\n");
+            return AVERROR_UNKNOWN;
+        }
+    }
+#endif
+
     if (!check_enc_param(avctx,q)) {
         av_log(avctx, AV_LOG_ERROR,
                "some encoding parameters are not supported by the QSV "
@@ -1463,12 +1520,21 @@ static int qsv_retrieve_enc_params(AVCodecContext *avctx, QSVEncContext *q)
     };
 #endif
 
-    mfxExtBuffer *ext_buffers[6 + QSV_HAVE_HE];
+#if QSV_HAVE_AC
+    mfxExtAlphaChannelEncCtrl alpha_encode_buf = {
+        .Header.BufferId = MFX_EXTBUFF_ALPHA_CHANNEL_ENC_CTRL,
+        .Header.BufferSz = sizeof(alpha_encode_buf),
+    };
+#endif
+
+    mfxExtBuffer *ext_buffers[6 + QSV_HAVE_HE + QSV_HAVE_AC];
 
     int need_pps = avctx->codec_id != AV_CODEC_ID_MPEG2VIDEO;
    int ret, ext_buf_num = 0, extradata_offset = 0;
 
-    q->co2_idx = q->co3_idx = q->exthevctiles_idx = q->exthypermodeparam_idx = -1;
+    q->co2_idx = q->co3_idx = q->exthevctiles_idx = q->exthypermodeparam_idx =
+        q->extaplhachannel_idx = -1;
+
     ext_buffers[ext_buf_num++] = (mfxExtBuffer*)&extradata;
     ext_buffers[ext_buf_num++] = (mfxExtBuffer*)&co;
 
@@ -1497,6 +1563,13 @@ static int qsv_retrieve_enc_params(AVCodecContext *avctx, QSVEncContext *q)
     }
 #endif
 
+#if QSV_HAVE_AC
+    if (q->alpha_encode && QSV_RUNTIME_VERSION_ATLEAST(q->ver, 2, 13)) {
+        q->extaplhachannel_idx = ext_buf_num;
+        ext_buffers[ext_buf_num++] = (mfxExtBuffer*)&alpha_encode_buf;
+    }
+#endif
+
     q->param.ExtParam    = ext_buffers;
     q->param.NumExtParam = ext_buf_num;
 
diff --git a/libavcodec/qsvenc.h b/libavcodec/qsvenc.h
index 4bc77f2f7c..559d40c919 100644
--- a/libavcodec/qsvenc.h
+++ b/libavcodec/qsvenc.h
@@ -44,11 +44,13 @@
 #define QSV_HAVE_VCM    1
 #define QSV_HAVE_MF     0
 #define QSV_HAVE_HE     QSV_VERSION_ATLEAST(2, 4)
+#define QSV_HAVE_AC     QSV_VERSION_ATLEAST(2, 13)
 #else
 #define QSV_HAVE_AVBR   0
 #define QSV_HAVE_VCM    0
 #define QSV_HAVE_MF     !QSV_ONEVPL
 #define QSV_HAVE_HE     0
+#define QSV_HAVE_AC     0
 #endif
 
 #define QSV_COMMON_OPTS \
@@ -188,10 +190,13 @@ typedef struct QSVEncContext {
     mfxFrameSurface1 **opaque_surfaces;
     AVBufferRef *opaque_alloc_buf;
 #endif
+#if QSV_HAVE_AC
+    mfxExtAlphaChannelEncCtrl extaplhachannelparam;
+#endif
 
     mfxExtVideoSignalInfo extvsi;
 
-    mfxExtBuffer *extparam_internal[5 + (QSV_HAVE_MF * 2) + (QSV_HAVE_EXT_AV1_PARAM * 2) + QSV_HAVE_HE];
+    mfxExtBuffer *extparam_internal[5 + (QSV_HAVE_MF * 2) + (QSV_HAVE_EXT_AV1_PARAM * 2) + QSV_HAVE_HE + QSV_HAVE_AC];
     int nb_extparam_internal;
 
     mfxExtBuffer **extparam_str;
@@ -269,6 +274,7 @@ typedef struct QSVEncContext {
 
     int co2_idx;
     int co3_idx;
+    int extaplhachannel_idx;
     int exthevctiles_idx;
     int exthypermodeparam_idx;
     int vp9_idx;
@@ -319,6 +325,7 @@ typedef struct QSVEncContext {
     int dual_gfx;
 
     AVDictionary *qsv_params;
+    int alpha_encode;
 } QSVEncContext;
 
 int ff_qsv_enc_init(AVCodecContext *avctx, QSVEncContext *q);
diff --git a/libavcodec/qsvenc_hevc.c b/libavcodec/qsvenc_hevc.c
index 2a397a2919..6ea9c4cdcb 100644
--- a/libavcodec/qsvenc_hevc.c
+++ b/libavcodec/qsvenc_hevc.c
@@ -365,6 +365,9 @@ static const AVOption options[] = {
     { "int_ref_qp_delta", "QP difference for the refresh MBs", OFFSET(qsv.int_ref_qp_delta), AV_OPT_TYPE_INT, { .i64 = INT16_MIN }, INT16_MIN, INT16_MAX, VE },
     { "int_ref_cycle_dist", "Distance between the beginnings of the intra-refresh cycles in frames", OFFSET(qsv.int_ref_cycle_dist), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, INT16_MAX, VE },
 
+#if QSV_HAVE_AC
+    { "alpha_encode", "Encode with alpha channel", OFFSET(qsv.alpha_encode), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE},
+#endif
     { NULL },
 };
 
diff --git a/libavutil/hwcontext_qsv.c b/libavutil/hwcontext_qsv.c
index 721c841c2a..8471b8bcdc 100644
--- a/libavutil/hwcontext_qsv.c
+++ b/libavutil/hwcontext_qsv.c
@@ -236,7 +236,7 @@ static uint32_t qsv_get_d3d11va_bind_flags(int mem_type)
         bind_flags = D3D11_BIND_DECODER;
 
     if ((MFX_MEMTYPE_FROM_VPPOUT & mem_type) || (MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET & mem_type))
-        bind_flags = D3D11_BIND_RENDER_TARGET;
+        bind_flags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;
 
     return bind_flags;
 }
@@ -590,7 +590,7 @@ static int qsv_init_child_ctx(AVHWFramesContext *ctx)
     if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
         AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
         if (hwctx->frame_type == 0)
-            hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
+            hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET | MFX_MEMTYPE_SHARED_RESOURCE;
         if (hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
             child_frames_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
         child_frames_hwctx->BindFlags = qsv_get_d3d11va_bind_flags(hwctx->frame_type);
-- 
2.34.1