> On Wed, 2022-03-30 at 14:43 +0800, Wenbin Chen wrote:
> > The QSV decoder only supports outputting nv12 and p010 directly to
> > system memory. For other formats, we need to download the frame from
> > the QSV format to system memory. Now add the other supported formats
> > to qsvdec.
> >
> > Signed-off-by: Wenbin Chen <wenbin.c...@intel.com>
> > ---
> >  libavcodec/qsv.c          | 36 ++++++++++++++++++++++++++++++++++++
> >  libavcodec/qsv_internal.h |  3 +++
> >  libavcodec/qsvdec.c       | 23 +++++++++++++++++------
> >  3 files changed, 56 insertions(+), 6 deletions(-)
> >
> > diff --git a/libavcodec/qsv.c b/libavcodec/qsv.c
> > index 67d0e3934a..8010eef172 100644
> > --- a/libavcodec/qsv.c
> > +++ b/libavcodec/qsv.c
> > @@ -244,6 +244,42 @@ int ff_qsv_map_pixfmt(enum AVPixelFormat format, uint32_t *fourcc)
> >      }
> >  }
> >
> > +int ff_qsv_map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
> > +{
> > +    switch (frame->format) {
> > +    case AV_PIX_FMT_NV12:
> > +    case AV_PIX_FMT_P010:
> > +        surface->Data.Y  = frame->data[0];
> > +        surface->Data.UV = frame->data[1];
> > +        /* The SDK checks Data.V when using system memory for VP9 encoding */
> > +        surface->Data.V  = surface->Data.UV + 1;
> > +        break;
> > +    case AV_PIX_FMT_X2RGB10LE:
> > +    case AV_PIX_FMT_BGRA:
> > +        surface->Data.B = frame->data[0];
> > +        surface->Data.G = frame->data[0] + 1;
> > +        surface->Data.R = frame->data[0] + 2;
> > +        surface->Data.A = frame->data[0] + 3;
> > +        break;
> > +    case AV_PIX_FMT_YUYV422:
> > +        surface->Data.Y = frame->data[0];
> > +        surface->Data.U = frame->data[0] + 1;
> > +        surface->Data.V = frame->data[0] + 3;
> > +        break;
> > +    case AV_PIX_FMT_Y210:
> > +        surface->Data.Y16 = (mfxU16 *)frame->data[0];
> > +        surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
> > +        surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
> > +        break;
> > +    default:
> > +        return MFX_ERR_UNSUPPORTED;
>
> Please change the return type to mfxStatus if you want to return an mfx
> error code, otherwise return an FFmpeg error here.
>
> Thanks
> Haihao

Thanks for the review. I will update it.
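
I am leaning towards returning an FFmpeg error code here rather than
changing the return type to mfxStatus, so the caller in alloc_frame()
can forward the value unchanged. Roughly (an untested sketch, not the
updated patch itself):

    default:
        return AVERROR(EINVAL);

i.e. MFX_ERR_UNSUPPORTED would no longer be returned from this function.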
> > +    }
> > +    surface->Data.PitchLow = frame->linesize[0];
> > +
> > +    return 0;
> > +}
> > +
> >  int ff_qsv_find_surface_idx(QSVFramesContext *ctx, QSVFrame *frame)
> >  {
> >      int i;
> > diff --git a/libavcodec/qsv_internal.h b/libavcodec/qsv_internal.h
> > index 58186ea7ca..e2aecdcbd6 100644
> > --- a/libavcodec/qsv_internal.h
> > +++ b/libavcodec/qsv_internal.h
> > @@ -147,4 +147,7 @@ int ff_qsv_find_surface_idx(QSVFramesContext *ctx, QSVFrame *frame);
> >  void ff_qsv_frame_add_ext_param(AVCodecContext *avctx, QSVFrame *frame,
> >                                  mfxExtBuffer *param);
> >
> > +int ff_qsv_map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface);
> > +
> > +
> >  #endif /* AVCODEC_QSV_INTERNAL_H */
> > diff --git a/libavcodec/qsvdec.c b/libavcodec/qsvdec.c
> > index 661f15bc75..f159e2690f 100644
> > --- a/libavcodec/qsvdec.c
> > +++ b/libavcodec/qsvdec.c
> > @@ -132,21 +132,28 @@ static int qsv_get_continuous_buffer(AVCodecContext *avctx, AVFrame *frame,
> >          frame->linesize[0] = FFALIGN(avctx->width, 128);
> >          break;
> >      case AV_PIX_FMT_P010:
> > +    case AV_PIX_FMT_YUYV422:
> >          frame->linesize[0] = 2 * FFALIGN(avctx->width, 128);
> >          break;
> > +    case AV_PIX_FMT_Y210:
> > +        frame->linesize[0] = 4 * FFALIGN(avctx->width, 128);
> > +        break;
> >      default:
> >          av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format.\n");
> >          return AVERROR(EINVAL);
> >      }
> >
> > -    frame->linesize[1] = frame->linesize[0];
> >      frame->buf[0] = av_buffer_pool_get(pool);
> >      if (!frame->buf[0])
> >          return AVERROR(ENOMEM);
> >
> >      frame->data[0] = frame->buf[0]->data;
> > -    frame->data[1] = frame->data[0] +
> > -                     frame->linesize[0] * FFALIGN(avctx->height, 64);
> > +    if (avctx->pix_fmt == AV_PIX_FMT_NV12 ||
> > +        avctx->pix_fmt == AV_PIX_FMT_P010) {
> > +        frame->linesize[1] = frame->linesize[0];
> > +        frame->data[1] = frame->data[0] +
> > +            frame->linesize[0] * FFALIGN(avctx->height, 64);
> > +    }
> >
> >      ret = ff_attach_decode_data(frame);
> >      if (ret < 0)
> > @@ -426,9 +433,11 @@ static int alloc_frame(AVCodecContext *avctx, QSVContext *q, QSVFrame *frame)
> >      if (frame->frame->format == AV_PIX_FMT_QSV) {
> >          frame->surface = *(mfxFrameSurface1*)frame->frame->data[3];
> >      } else {
> > -        frame->surface.Data.PitchLow = frame->frame->linesize[0];
> > -        frame->surface.Data.Y        = frame->frame->data[0];
> > -        frame->surface.Data.UV       = frame->frame->data[1];
> > +        ret = ff_qsv_map_frame_to_surface(frame->frame, &frame->surface);
> > +        if (ret < 0) {
> > +            av_log(avctx, AV_LOG_ERROR, "map frame to surface failed.\n");
> > +            return AVERROR(EINVAL);
> > +        }
> >      }
> >
> >      frame->surface.Info = q->frame_info;
> > @@ -993,6 +1002,8 @@ const FFCodec ff_##x##_qsv_decoder = { \
> >      .p.priv_class   = &x##_qsv_class, \
> >      .p.pix_fmts     = (const enum AVPixelFormat[]){ AV_PIX_FMT_NV12, \
> >                                                      AV_PIX_FMT_P010, \
> > +                                                    AV_PIX_FMT_YUYV422, \
> > +                                                    AV_PIX_FMT_Y210, \
> >                                                      AV_PIX_FMT_QSV, \
> >                                                      AV_PIX_FMT_NONE }, \
> >      .hw_configs     = qsv_hw_configs, \
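
One note on the qsv_get_continuous_buffer() hunk above, in case it helps
review: YUYV422 is a packed 2-bytes-per-pixel format and Y210 is packed
10-bit 4:2:2 stored in 16-bit words (4 bytes per pixel), so only the
semi-planar NV12/P010 paths still need linesize[1]/data[1]. A small
standalone sketch of that arithmetic (hypothetical helper names, not
code from the tree; the pool buffer itself is allocated elsewhere in
qsvdec.c):

    /* Illustration only: per-format layout assumed by
     * qsv_get_continuous_buffer(), padded the same way the decoder
     * does (width to 128 pixels, height to 64 lines). */
    #include <stddef.h>
    #include <stdio.h>

    #define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))

    static size_t frame_bytes(int bytes_per_pixel, int has_uv_plane,
                              int width, int height)
    {
        size_t pitch = (size_t)bytes_per_pixel * ALIGN(width, 128);
        size_t luma  = pitch * ALIGN(height, 64);
        /* NV12/P010: interleaved UV plane at half height, same pitch. */
        return has_uv_plane ? luma + luma / 2 : luma;
    }

    int main(void)
    {
        printf("nv12    1920x1080: %zu\n", frame_bytes(1, 1, 1920, 1080));
        printf("p010    1920x1080: %zu\n", frame_bytes(2, 1, 1920, 1080));
        printf("yuyv422 1920x1080: %zu\n", frame_bytes(2, 0, 1920, 1080));
        printf("y210    1920x1080: %zu\n", frame_bytes(4, 0, 1920, 1080));
        return 0;
    }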