Message ID | 20220406084804.1043196-1-wenbin.chen@intel.com |
---|---|
State | Accepted |
Commit | e0ae810da3ed0b4489d0dc35c40aa239ba213ec6 |
Headers | show |
Series | [FFmpeg-devel,v3,1/2] libavcodec/qsvdec: Add more pixel format support to qsvdec | expand |
Context | Check | Description |
---|---|---|
andriy/make_x86 | success | Make finished |
andriy/make_fate_x86 | success | Make fate finished |
On Wed, 2022-04-06 at 16:48 +0800, Wenbin Chen wrote: > Qsv decoder only supports directly output nv12 and p010 to system > memory. For other format, we need to download frame from qsv format > to system memory. Now add other supported format to qsvdec. > > Signed-off-by: Wenbin Chen <wenbin.chen@intel.com> > --- > libavcodec/qsv.c | 36 ++++++++++++++++++++++++++++++++++++ > libavcodec/qsv_internal.h | 3 +++ > libavcodec/qsvdec.c | 23 +++++++++++++++++------ > 3 files changed, 56 insertions(+), 6 deletions(-) > > diff --git a/libavcodec/qsv.c b/libavcodec/qsv.c > index 67d0e3934a..b86c20b153 100644 > --- a/libavcodec/qsv.c > +++ b/libavcodec/qsv.c > @@ -244,6 +244,42 @@ int ff_qsv_map_pixfmt(enum AVPixelFormat format, uint32_t > *fourcc) > } > } > > +int ff_qsv_map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 > *surface) > +{ > + switch (frame->format) { > + case AV_PIX_FMT_NV12: > + case AV_PIX_FMT_P010: > + surface->Data.Y = frame->data[0]; > + surface->Data.UV = frame->data[1]; > + /* The SDK checks Data.V when using system memory for VP9 encoding */ > + surface->Data.V = surface->Data.UV + 1; > + break; > + case AV_PIX_FMT_X2RGB10LE: > + case AV_PIX_FMT_BGRA: > + surface->Data.B = frame->data[0]; > + surface->Data.G = frame->data[0] + 1; > + surface->Data.R = frame->data[0] + 2; > + surface->Data.A = frame->data[0] + 3; > + break; > + case AV_PIX_FMT_YUYV422: > + surface->Data.Y = frame->data[0]; > + surface->Data.U = frame->data[0] + 1; > + surface->Data.V = frame->data[0] + 3; > + break; > + > + case AV_PIX_FMT_Y210: > + surface->Data.Y16 = (mfxU16 *)frame->data[0]; > + surface->Data.U16 = (mfxU16 *)frame->data[0] + 1; > + surface->Data.V16 = (mfxU16 *)frame->data[0] + 3; > + break; > + default: > + return AVERROR(ENOSYS); > + } > + surface->Data.PitchLow = frame->linesize[0]; > + > + return 0; > +} > + > int ff_qsv_find_surface_idx(QSVFramesContext *ctx, QSVFrame *frame) > { > int i; > diff --git a/libavcodec/qsv_internal.h 
b/libavcodec/qsv_internal.h > index 58186ea7ca..e2aecdcbd6 100644 > --- a/libavcodec/qsv_internal.h > +++ b/libavcodec/qsv_internal.h > @@ -147,4 +147,7 @@ int ff_qsv_find_surface_idx(QSVFramesContext *ctx, > QSVFrame *frame); > void ff_qsv_frame_add_ext_param(AVCodecContext *avctx, QSVFrame *frame, > mfxExtBuffer *param); > > +int ff_qsv_map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 > *surface); > + > + > #endif /* AVCODEC_QSV_INTERNAL_H */ > diff --git a/libavcodec/qsvdec.c b/libavcodec/qsvdec.c > index de4af1754d..c4296f80d7 100644 > --- a/libavcodec/qsvdec.c > +++ b/libavcodec/qsvdec.c > @@ -132,21 +132,28 @@ static int qsv_get_continuous_buffer(AVCodecContext > *avctx, AVFrame *frame, > frame->linesize[0] = FFALIGN(avctx->width, 128); > break; > case AV_PIX_FMT_P010: > + case AV_PIX_FMT_YUYV422: > frame->linesize[0] = 2 * FFALIGN(avctx->width, 128); > break; > + case AV_PIX_FMT_Y210: > + frame->linesize[0] = 4 * FFALIGN(avctx->width, 128); > + break; > default: > av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format.\n"); > return AVERROR(EINVAL); > } > > - frame->linesize[1] = frame->linesize[0]; > frame->buf[0] = av_buffer_pool_get(pool); > if (!frame->buf[0]) > return AVERROR(ENOMEM); > > frame->data[0] = frame->buf[0]->data; > - frame->data[1] = frame->data[0] + > - frame->linesize[0] * FFALIGN(avctx->height, 64); > + if (avctx->pix_fmt == AV_PIX_FMT_NV12 || > + avctx->pix_fmt == AV_PIX_FMT_P010) { > + frame->linesize[1] = frame->linesize[0]; > + frame->data[1] = frame->data[0] + > + frame->linesize[0] * FFALIGN(avctx->height, 64); > + } > > ret = ff_attach_decode_data(frame); > if (ret < 0) > @@ -426,9 +433,11 @@ static int alloc_frame(AVCodecContext *avctx, QSVContext > *q, QSVFrame *frame) > if (frame->frame->format == AV_PIX_FMT_QSV) { > frame->surface = *(mfxFrameSurface1*)frame->frame->data[3]; > } else { > - frame->surface.Data.PitchLow = frame->frame->linesize[0]; > - frame->surface.Data.Y = frame->frame->data[0]; > - 
frame->surface.Data.UV = frame->frame->data[1]; > + ret = ff_qsv_map_frame_to_surface(frame->frame, &frame->surface); > + if (ret < 0) { > + av_log(avctx, AV_LOG_ERROR, "map frame to surface failed.\n"); > + return ret; > + } > } > > frame->surface.Info = q->frame_info; > @@ -992,6 +1001,8 @@ const FFCodec ff_##x##_qsv_decoder = { \ > .p.priv_class = &x##_qsv_class, \ > .p.pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_NV12, \ > AV_PIX_FMT_P010, \ > + AV_PIX_FMT_YUYV422, \ > + AV_PIX_FMT_Y210, \ > AV_PIX_FMT_QSV, \ > AV_PIX_FMT_NONE }, \ > .hw_configs = qsv_hw_configs, \ Patchset LGTM and applied. -Haihao
diff --git a/libavcodec/qsv.c b/libavcodec/qsv.c index 67d0e3934a..b86c20b153 100644 --- a/libavcodec/qsv.c +++ b/libavcodec/qsv.c @@ -244,6 +244,42 @@ int ff_qsv_map_pixfmt(enum AVPixelFormat format, uint32_t *fourcc) } } +int ff_qsv_map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface) +{ + switch (frame->format) { + case AV_PIX_FMT_NV12: + case AV_PIX_FMT_P010: + surface->Data.Y = frame->data[0]; + surface->Data.UV = frame->data[1]; + /* The SDK checks Data.V when using system memory for VP9 encoding */ + surface->Data.V = surface->Data.UV + 1; + break; + case AV_PIX_FMT_X2RGB10LE: + case AV_PIX_FMT_BGRA: + surface->Data.B = frame->data[0]; + surface->Data.G = frame->data[0] + 1; + surface->Data.R = frame->data[0] + 2; + surface->Data.A = frame->data[0] + 3; + break; + case AV_PIX_FMT_YUYV422: + surface->Data.Y = frame->data[0]; + surface->Data.U = frame->data[0] + 1; + surface->Data.V = frame->data[0] + 3; + break; + + case AV_PIX_FMT_Y210: + surface->Data.Y16 = (mfxU16 *)frame->data[0]; + surface->Data.U16 = (mfxU16 *)frame->data[0] + 1; + surface->Data.V16 = (mfxU16 *)frame->data[0] + 3; + break; + default: + return AVERROR(ENOSYS); + } + surface->Data.PitchLow = frame->linesize[0]; + + return 0; +} + int ff_qsv_find_surface_idx(QSVFramesContext *ctx, QSVFrame *frame) { int i; diff --git a/libavcodec/qsv_internal.h b/libavcodec/qsv_internal.h index 58186ea7ca..e2aecdcbd6 100644 --- a/libavcodec/qsv_internal.h +++ b/libavcodec/qsv_internal.h @@ -147,4 +147,7 @@ int ff_qsv_find_surface_idx(QSVFramesContext *ctx, QSVFrame *frame); void ff_qsv_frame_add_ext_param(AVCodecContext *avctx, QSVFrame *frame, mfxExtBuffer *param); +int ff_qsv_map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface); + + #endif /* AVCODEC_QSV_INTERNAL_H */ diff --git a/libavcodec/qsvdec.c b/libavcodec/qsvdec.c index de4af1754d..c4296f80d7 100644 --- a/libavcodec/qsvdec.c +++ b/libavcodec/qsvdec.c @@ -132,21 +132,28 @@ static int 
qsv_get_continuous_buffer(AVCodecContext *avctx, AVFrame *frame, frame->linesize[0] = FFALIGN(avctx->width, 128); break; case AV_PIX_FMT_P010: + case AV_PIX_FMT_YUYV422: frame->linesize[0] = 2 * FFALIGN(avctx->width, 128); break; + case AV_PIX_FMT_Y210: + frame->linesize[0] = 4 * FFALIGN(avctx->width, 128); + break; default: av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format.\n"); return AVERROR(EINVAL); } - frame->linesize[1] = frame->linesize[0]; frame->buf[0] = av_buffer_pool_get(pool); if (!frame->buf[0]) return AVERROR(ENOMEM); frame->data[0] = frame->buf[0]->data; - frame->data[1] = frame->data[0] + - frame->linesize[0] * FFALIGN(avctx->height, 64); + if (avctx->pix_fmt == AV_PIX_FMT_NV12 || + avctx->pix_fmt == AV_PIX_FMT_P010) { + frame->linesize[1] = frame->linesize[0]; + frame->data[1] = frame->data[0] + + frame->linesize[0] * FFALIGN(avctx->height, 64); + } ret = ff_attach_decode_data(frame); if (ret < 0) @@ -426,9 +433,11 @@ static int alloc_frame(AVCodecContext *avctx, QSVContext *q, QSVFrame *frame) if (frame->frame->format == AV_PIX_FMT_QSV) { frame->surface = *(mfxFrameSurface1*)frame->frame->data[3]; } else { - frame->surface.Data.PitchLow = frame->frame->linesize[0]; - frame->surface.Data.Y = frame->frame->data[0]; - frame->surface.Data.UV = frame->frame->data[1]; + ret = ff_qsv_map_frame_to_surface(frame->frame, &frame->surface); + if (ret < 0) { + av_log(avctx, AV_LOG_ERROR, "map frame to surface failed.\n"); + return ret; + } } frame->surface.Info = q->frame_info; @@ -992,6 +1001,8 @@ const FFCodec ff_##x##_qsv_decoder = { \ .p.priv_class = &x##_qsv_class, \ .p.pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_NV12, \ AV_PIX_FMT_P010, \ + AV_PIX_FMT_YUYV422, \ + AV_PIX_FMT_Y210, \ AV_PIX_FMT_QSV, \ AV_PIX_FMT_NONE }, \ .hw_configs = qsv_hw_configs, \
Qsv decoder only supports directly outputting nv12 and p010 to system memory. For other formats, we need to download the frame from the qsv format to system memory. Now add the other supported formats to qsvdec. Signed-off-by: Wenbin Chen <wenbin.chen@intel.com> --- libavcodec/qsv.c | 36 ++++++++++++++++++++++++++++++++++++ libavcodec/qsv_internal.h | 3 +++ libavcodec/qsvdec.c | 23 +++++++++++++++++------ 3 files changed, 56 insertions(+), 6 deletions(-)