diff mbox

[FFmpeg-devel,v2] avfilter/vaapi: add overlay_vaapi filter

Message ID 20190611025136.9391-1-zachary.zhou@intel.com
State Superseded
Headers show

Commit Message

Zachary Zhou June 11, 2019, 2:51 a.m. UTC
---
 configure                      |   1 +
 libavfilter/Makefile           |   1 +
 libavfilter/allfilters.c       |   1 +
 libavfilter/vaapi_vpp.c        |  95 +++++++++
 libavfilter/vaapi_vpp.h        |   5 +
 libavfilter/vf_overlay_vaapi.c | 357 +++++++++++++++++++++++++++++++++
 6 files changed, 460 insertions(+)
 create mode 100644 libavfilter/vf_overlay_vaapi.c

Comments

Zachary Zhou June 19, 2019, 1:39 a.m. UTC | #1
> -----Original Message-----
> From: Zhou, Zachary
> Sent: Tuesday, June 11, 2019 10:52 AM
> To: ffmpeg-devel@ffmpeg.org
> Cc: Zhou, Zachary <zachary.zhou@intel.com>
> Subject: [PATCH v2] avfilter/vaapi: add overlay_vaapi filter
> 
> ---
>  configure                      |   1 +
>  libavfilter/Makefile           |   1 +
>  libavfilter/allfilters.c       |   1 +
>  libavfilter/vaapi_vpp.c        |  95 +++++++++
>  libavfilter/vaapi_vpp.h        |   5 +
>  libavfilter/vf_overlay_vaapi.c | 357 +++++++++++++++++++++++++++++++++
>  6 files changed, 460 insertions(+)
>  create mode 100644 libavfilter/vf_overlay_vaapi.c
> 

Hi Mark,

Could you please help review this patch?

Thanks
Zachary
Jun Zhao June 19, 2019, 2:12 a.m. UTC | #2
?
On Tue, Jun 11, 2019 at 10:52 AM Zachary Zhou <zachary.zhou@intel.com> wrote:
>
> ---
>  configure                      |   1 +
>  libavfilter/Makefile           |   1 +
>  libavfilter/allfilters.c       |   1 +
>  libavfilter/vaapi_vpp.c        |  95 +++++++++
>  libavfilter/vaapi_vpp.h        |   5 +
>  libavfilter/vf_overlay_vaapi.c | 357 +++++++++++++++++++++++++++++++++
>  6 files changed, 460 insertions(+)
>  create mode 100644 libavfilter/vf_overlay_vaapi.c
>
> diff --git a/configure b/configure
> index 32fc26356c..f469e6a3b1 100755
> --- a/configure
> +++ b/configure
> @@ -3478,6 +3478,7 @@ openclsrc_filter_deps="opencl"
>  overlay_opencl_filter_deps="opencl"
>  overlay_qsv_filter_deps="libmfx"
>  overlay_qsv_filter_select="qsvvpp"
> +overlay_vaapi_filter_deps="vaapi"
>  owdenoise_filter_deps="gpl"
>  pan_filter_deps="swresample"
>  perspective_filter_deps="gpl"
> diff --git a/libavfilter/Makefile b/libavfilter/Makefile
> index 07ea8d7edc..5cbf1a7e41 100644
> --- a/libavfilter/Makefile
> +++ b/libavfilter/Makefile
> @@ -311,6 +311,7 @@ OBJS-$(CONFIG_OVERLAY_FILTER)                += vf_overlay.o framesync.o
>  OBJS-$(CONFIG_OVERLAY_OPENCL_FILTER)         += vf_overlay_opencl.o opencl.o \
>                                                  opencl/overlay.o framesync.o
>  OBJS-$(CONFIG_OVERLAY_QSV_FILTER)            += vf_overlay_qsv.o framesync.o
> +OBJS-$(CONFIG_OVERLAY_VAAPI_FILTER)          += vf_overlay_vaapi.o framesync.o
>  OBJS-$(CONFIG_OWDENOISE_FILTER)              += vf_owdenoise.o
>  OBJS-$(CONFIG_PAD_FILTER)                    += vf_pad.o
>  OBJS-$(CONFIG_PALETTEGEN_FILTER)             += vf_palettegen.o
> diff --git a/libavfilter/allfilters.c b/libavfilter/allfilters.c
> index 9c846b1ddd..27ee1df78b 100644
> --- a/libavfilter/allfilters.c
> +++ b/libavfilter/allfilters.c
> @@ -295,6 +295,7 @@ extern AVFilter ff_vf_oscilloscope;
>  extern AVFilter ff_vf_overlay;
>  extern AVFilter ff_vf_overlay_opencl;
>  extern AVFilter ff_vf_overlay_qsv;
> +extern AVFilter ff_vf_overlay_vaapi;
>  extern AVFilter ff_vf_owdenoise;
>  extern AVFilter ff_vf_pad;
>  extern AVFilter ff_vf_palettegen;
> diff --git a/libavfilter/vaapi_vpp.c b/libavfilter/vaapi_vpp.c
> index b5b245c8af..a8caa5b532 100644
> --- a/libavfilter/vaapi_vpp.c
> +++ b/libavfilter/vaapi_vpp.c
> @@ -663,6 +663,101 @@ fail:
>      return err;
>  }
>
> +int ff_vaapi_vpp_render_overlay(AVFilterContext *avctx,
> +                                VAProcPipelineParameterBuffer *params,
> +                                VAProcPipelineParameterBuffer *subpic_params,
> +                                VASurfaceID output_surface)
> +{
> +    VABufferID params_id;
> +    VABufferID subpic_params_id;
> +    VAStatus vas;
> +    int err = 0;
> +    VAAPIVPPContext *ctx   = avctx->priv;
> +
> +    vas = vaBeginPicture(ctx->hwctx->display,
> +                         ctx->va_context, output_surface);
> +    if (vas != VA_STATUS_SUCCESS) {
> +        av_log(avctx, AV_LOG_ERROR, "Failed to attach new picture: "
> +               "%d (%s).\n", vas, vaErrorStr(vas));
> +        err = AVERROR(EIO);
> +        goto fail;
> +    }
> +
> +    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
> +                         VAProcPipelineParameterBufferType,
> +                         sizeof(*params), 1, params, &params_id);
> +    if (vas != VA_STATUS_SUCCESS) {
> +        av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
> +               "%d (%s).\n", vas, vaErrorStr(vas));
> +        err = AVERROR(EIO);
> +        goto fail_after_begin;
> +    }
> +    av_log(avctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n",
> +           params_id);
> +
> +
> +    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
> +                         VAProcPipelineParameterBufferType,
> +                         sizeof(*subpic_params), 1, subpic_params, &subpic_params_id);
> +    if (vas != VA_STATUS_SUCCESS) {
> +        av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
> +               "%d (%s).\n", vas, vaErrorStr(vas));
> +        err = AVERROR(EIO);
> +        goto fail_after_begin;
> +    }
> +    av_log(avctx, AV_LOG_DEBUG, "Pipeline subpic parameter buffer is %#x.\n",
> +           subpic_params_id);
> +
> +    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
> +                          &params_id, 1);
> +    if (vas != VA_STATUS_SUCCESS) {
> +        av_log(avctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
> +               "%d (%s).\n", vas, vaErrorStr(vas));
> +        err = AVERROR(EIO);
> +        goto fail_after_begin;
> +    }
> +
> +    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
> +                          &subpic_params_id, 1);
> +    if (vas != VA_STATUS_SUCCESS) {
> +        av_log(avctx, AV_LOG_ERROR, "Failed to render subpic parameter buffer: "
> +               "%d (%s).\n", vas, vaErrorStr(vas));
> +        err = AVERROR(EIO);
> +        goto fail_after_begin;
> +    }
> +
> +    vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
> +    if (vas != VA_STATUS_SUCCESS) {
> +        av_log(avctx, AV_LOG_ERROR, "Failed to start picture processing: "
> +               "%d (%s).\n", vas, vaErrorStr(vas));
> +        err = AVERROR(EIO);
> +        goto fail_after_render;
> +    }
> +
> +    if (CONFIG_VAAPI_1 || ctx->hwctx->driver_quirks &
> +        AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS) {
> +        vas = vaDestroyBuffer(ctx->hwctx->display, params_id);
> +        if (vas != VA_STATUS_SUCCESS) {
> +            av_log(avctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
> +                   "%d (%s).\n", vas, vaErrorStr(vas));
> +            // And ignore.
> +        }
> +    }
> +
> +    return 0;
> +
> +    // We want to make sure that if vaBeginPicture has been called, we also
> +    // call vaRenderPicture and vaEndPicture.  These calls may well fail or
> +    // do something else nasty, but once we're in this failure case there
> +    // isn't much else we can do.
> +fail_after_begin:
> +    vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_id, 1);
> +fail_after_render:
> +    vaEndPicture(ctx->hwctx->display, ctx->va_context);
> +fail:
> +    return err;
> +}
> +
>  void ff_vaapi_vpp_ctx_init(AVFilterContext *avctx)
>  {
>      int i;
> diff --git a/libavfilter/vaapi_vpp.h b/libavfilter/vaapi_vpp.h
> index c3da91717c..8d53acdec9 100644
> --- a/libavfilter/vaapi_vpp.h
> +++ b/libavfilter/vaapi_vpp.h
> @@ -83,4 +83,9 @@ int ff_vaapi_vpp_render_picture(AVFilterContext *avctx,
>                                  VAProcPipelineParameterBuffer *params,
>                                  AVFrame *output_frame);
>
> +int ff_vaapi_vpp_render_overlay(AVFilterContext *avctx,
> +                                VAProcPipelineParameterBuffer *primary_params,
> +                                VAProcPipelineParameterBuffer *subpic_params,
> +                                VASurfaceID output_surface);
> +
>  #endif /* AVFILTER_VAAPI_VPP_H */
> diff --git a/libavfilter/vf_overlay_vaapi.c b/libavfilter/vf_overlay_vaapi.c
> new file mode 100644
> index 0000000000..0f72b23611
> --- /dev/null
> +++ b/libavfilter/vf_overlay_vaapi.c
> @@ -0,0 +1,357 @@
> +/*
> + * This file is part of FFmpeg.
> + *
> + * FFmpeg is free software; you can redistribute it and/or
> + * modify it under the terms of the GNU Lesser General Public
> + * License as published by the Free Software Foundation; either
> + * version 2.1 of the License, or (at your option) any later version.
> + *
> + * FFmpeg is distributed in the hope that it will be useful,
> + * but WITHOUT ANY WARRANTY; without even the implied warranty of
> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
> + * Lesser General Public License for more details.
> + *
> + * You should have received a copy of the GNU Lesser General Public
> + * License along with FFmpeg; if not, write to the Free Software
> + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
> + */
> +#include <string.h>
> +
> +#include "libavutil/avassert.h"
> +#include "libavutil/mem.h"
> +#include "libavutil/opt.h"
> +#include "libavutil/pixdesc.h"
> +
> +#include "avfilter.h"
> +#include "framesync.h"
> +#include "formats.h"
> +#include "internal.h"
> +#include "vaapi_vpp.h"
> +
> +#if VA_CHECK_VERSION(2, 1, 0)
> +// Blend State
> +static VABlendState blend_state = { /** \brief Video blending flags. */
> +    .flags        = VA_BLEND_PREMULTIPLIED_ALPHA, //VA_BLEND_GLOBAL_ALPHA,
> +    .global_alpha = 1,
> +    .min_luma     = 0,
> +    .max_luma     = 1
> +};
> +#endif
> +
> +typedef struct OverlayVAAPIContext {
> +    VAAPIVPPContext  vpp_ctx; // must be the first field
> +
> +    FFFrameSync      fs;
> +
> +    int              overlay_x;
> +    int              overlay_y;
> +    int              overlay_w;
> +    int              overlay_h;
> +    float            overlay_alpha;
> +} OverlayVAAPIContext;
> +
> +static int overlay_vaapi_query_formats(AVFilterContext *ctx)
> +{
> +    int i;
> +    int ret;
> +
> +    static const enum AVPixelFormat main_in_fmts[] = {
> +        AV_PIX_FMT_NV12,
> +        AV_PIX_FMT_RGB32,
> +        AV_PIX_FMT_VAAPI,
> +        AV_PIX_FMT_NONE
> +    };
> +    static const enum AVPixelFormat out_pix_fmts[] = {
> +        AV_PIX_FMT_NV12,
> +        AV_PIX_FMT_RGB32,
> +        AV_PIX_FMT_VAAPI,
> +        AV_PIX_FMT_NONE
> +    };
> +
> +    for (i = 0; i < ctx->nb_inputs; i++) {
> +        ret = ff_formats_ref(ff_make_format_list(main_in_fmts), &ctx->inputs[i]->out_formats);
> +        if (ret < 0)
> +            return ret;
> +    }
> +
> +    ret = ff_formats_ref(ff_make_format_list(out_pix_fmts), &ctx->outputs[0]->in_formats);
> +    if (ret < 0)
> +        return ret;
> +
> +    return 0;
> +}
> +
> +static int overlay_vaapi_blend(FFFrameSync *fs)
> +{
> +    AVFilterContext    *avctx = fs->parent;
> +    AVFilterLink     *outlink = avctx->outputs[0];
> +    OverlayVAAPIContext *ctx  = avctx->priv;
> +    VAAPIVPPContext *vpp_ctx  = avctx->priv;
> +    AVFrame *input_main, *input_overlay;
> +    AVFrame *output;
> +    int err;
> +    VASurfaceID main_surface, overlay_surface, output_surface;
> +    VARectangle main_region, overlay_region, output_region;
> +    VAProcPipelineParameterBuffer params, subpic_params;
> +
> +    err = ff_framesync_get_frame(fs, 0, &input_main, 0);
> +    if (err < 0)
> +        return err;
> +    err = ff_framesync_get_frame(fs, 1, &input_overlay, 0);
> +    if (err < 0)
> +        return err;
> +
> +    av_log(avctx, AV_LOG_DEBUG, "Filter main: %s, %ux%u (%"PRId64").\n",
> +           av_get_pix_fmt_name(input_main->format),
> +           input_main->width, input_main->height, input_main->pts);
> +
> +    av_log(avctx, AV_LOG_DEBUG, "Filter overlay: %s, %ux%u (%"PRId64").\n",
> +           av_get_pix_fmt_name(input_overlay->format),
> +           input_overlay->width, input_overlay->height, input_overlay->pts);
> +
> +    if (vpp_ctx->va_context == VA_INVALID_ID)
> +        return AVERROR(EINVAL);
> +
> +    main_surface = (VASurfaceID)(uintptr_t)input_main->data[3];
> +    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for overlay vpp main.\n",
> +           main_surface);
> +
> +    overlay_surface = (VASurfaceID)(uintptr_t)input_overlay->data[3];
> +    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for overlay vpp overlay.\n",
> +           overlay_surface);
> +
> +    output = ff_get_video_buffer(outlink, outlink->w, outlink->h);
> +    if (!output) {
> +        err = AVERROR(ENOMEM);
> +        goto fail;
> +    }
> +
> +    output_surface = (VASurfaceID)(uintptr_t)output->data[3];
> +    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for overlay vpp output.\n",
> +           output_surface);
> +
> +    memset(&params, 0, sizeof(params));
> +    memset(&subpic_params, 0, sizeof(subpic_params));
> +
> +    main_region = (VARectangle) {
> +        .x      = 0,
> +        .y      = 0,
> +        .width  = input_main->width,
> +        .height = input_main->height,
> +    };
> +
> +    overlay_region = (VARectangle) {
> +        .x      = ctx->overlay_x,
> +        .y      = ctx->overlay_y,
> +        .width  = input_overlay->width,
> +        .height = input_overlay->height,
> +    };
> +
> +    output_region = (VARectangle) {
> +        .x      = 0,
> +        .y      = 0,
> +        .width  = output->width,
> +        .height = output->height,
> +    };
> +
A question: if overlay_width > main_width && overlay_height >
main_height, what is the output of this filter?

And what happens when overlay_x > main_width && overlay_y > main_height?

> +    if (!ctx->overlay_w && !ctx->overlay_h) {
> +        overlay_region.width = ctx->overlay_w;
> +        overlay_region.height = ctx->overlay_h;
> +    }
> +
> +    params.filters     = &vpp_ctx->filter_buffers[0];
> +    params.num_filters = vpp_ctx->nb_filter_buffers;
> +
> +    params.surface = main_surface;
> +    params.surface_region = &main_region;
> +
> +    switch (input_main->colorspace) {
> +    case AVCOL_SPC_BT470BG:
> +        params.surface_color_standard = VAProcColorStandardBT601;
> +        break;
> +    case AVCOL_SPC_BT709:
> +        params.surface_color_standard = VAProcColorStandardBT709;
> +        break;
> +    default:
> +        params.surface_color_standard = VAProcColorStandardNone;
> +        break;
> +    }
> +
> +    params.output_region = &output_region;
> +    params.output_background_color = VAAPI_VPP_BACKGROUND_BLACK;
> +    params.output_color_standard = params.surface_color_standard;
> +
> +    memcpy(&subpic_params, &params, sizeof(subpic_params));
> +    subpic_params.pipeline_flags |= VA_PROC_PIPELINE_FAST;
> +    subpic_params.filter_flags |= VA_FILTER_SCALING_FAST;
> +
> +#if VA_CHECK_VERSION(2, 1, 0)
> +    blend_state.global_alpha = ctx->overlay_alpha;
> +    subpic_params.blend_state = &blend_state;
> +#endif
> +
> +    subpic_params.surface = overlay_surface;
> +    subpic_params.output_region = &overlay_region;
> +
> +    err = ff_vaapi_vpp_render_overlay(avctx, &params, &subpic_params, output_surface);
> +    if (err < 0)
> +        goto fail;
> +
> +    err = av_frame_copy_props(output, input_main);
> +    if (err < 0)
> +        goto fail;
> +
> +    av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
> +           av_get_pix_fmt_name(output->format),
> +           output->width, output->height, output->pts);
> +
> +    return ff_filter_frame(outlink, output);
> +
> +fail:
> +    av_frame_free(&output);
> +    return err;
> +}
> +
> +static int overlay_vaapi_init_framesync(AVFilterContext *avctx)
> +{
> +    OverlayVAAPIContext *ctx = avctx->priv;
> +    int ret, i;
> +
> +    ctx->fs.on_event = overlay_vaapi_blend;
> +    ctx->fs.opaque   = ctx;
> +    ret = ff_framesync_init(&ctx->fs, avctx, avctx->nb_inputs);
> +    if (ret < 0)
> +        return ret;
> +
> +    for (i = 0; i < avctx->nb_inputs; i++) {
> +        FFFrameSyncIn *in = &ctx->fs.in[i];
> +        in->before    = EXT_STOP;
> +        in->after     = EXT_INFINITY;
> +        in->sync      = i ? 1 : 2;
> +        in->time_base = avctx->inputs[i]->time_base;
> +    }
> +
> +    return ff_framesync_configure(&ctx->fs);
> +}
> +
> +static int overlay_vaapi_config_output(AVFilterLink *outlink)
> +{
> +    AVFilterContext  *avctx  = outlink->src;
> +    OverlayVAAPIContext *ctx = avctx->priv;
> +    VAAPIVPPContext *vpp_ctx = avctx->priv;
> +    AVFilterLink        *in0 = avctx->inputs[0];
> +    AVFilterLink        *in1 = avctx->inputs[1];
> +    int err;
> +
> +    av_log(ctx, AV_LOG_DEBUG, "Output is of %s.\n", av_get_pix_fmt_name(outlink->format));
> +    if ((in0->format == AV_PIX_FMT_VAAPI && in1->format != AV_PIX_FMT_VAAPI) ||
> +        (in0->format != AV_PIX_FMT_VAAPI && in1->format == AV_PIX_FMT_VAAPI)) {
> +        av_log(ctx, AV_LOG_ERROR, "Mixing hardware and software pixel formats is not supported.\n");
> +        return AVERROR(EINVAL);
> +    }
> +
> +    err = overlay_vaapi_init_framesync(avctx);
> +    if (err < 0)
> +        return err;
> +
> +    vpp_ctx->output_width  = avctx->inputs[0]->w;
> +    vpp_ctx->output_height = avctx->inputs[0]->h;
> +
> +    err = ff_vaapi_vpp_config_output(outlink);
> +    if (err < 0)
> +        return err;
> +
> +    err = ff_framesync_init_dualinput(&ctx->fs, avctx);
> +    if (err < 0)
> +        return err;
> +
> +    return ff_framesync_configure(&ctx->fs);
> +}
> +
> +static av_cold int overlay_vaapi_init(AVFilterContext *avctx)
> +{
> +    VAAPIVPPContext *vpp_ctx = avctx->priv;
> +
> +    ff_vaapi_vpp_ctx_init(avctx);
> +    vpp_ctx->output_format = AV_PIX_FMT_NONE;
> +
> +    return 0;
> +}
> +
> +static int overlay_vaapi_activate(AVFilterContext *avctx)
> +{
> +    OverlayVAAPIContext *ctx = avctx->priv;
> +
> +    return ff_framesync_activate(&ctx->fs);
> +}
> +
> +static av_cold void overlay_vaapi_uninit(AVFilterContext *avctx)
> +{
> +    OverlayVAAPIContext *ctx = avctx->priv;
> +
> +    ff_framesync_uninit(&ctx->fs);
> +}
> +
> +static AVFrame *get_video_buffer(AVFilterLink *inlink, int w, int h)
> +{
> +    return ff_default_get_video_buffer(inlink, w, h);
> +}
> +
> +#define OFFSET(x) offsetof(OverlayVAAPIContext, x)
> +#define FLAGS (AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM)
> +static const AVOption overlay_vaapi_options[] = {
> +    { "x", "Overlay x position",
> +      OFFSET(overlay_x), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags = FLAGS },
> +    { "y", "Overlay y position",
> +      OFFSET(overlay_y), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags = FLAGS },
> +    { "w", "Overlay width",
> +      OFFSET(overlay_w), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags = FLAGS },
> +    { "h", "Overlay hight",
> +      OFFSET(overlay_h), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags = FLAGS },
> +    { "alpha", "Overlay global alpha",
> +      OFFSET(overlay_alpha), AV_OPT_TYPE_FLOAT, { .dbl = 0.0}, 0.0, 1.0, .flags = FLAGS},
> +    { NULL },
> +};
> +
> +AVFILTER_DEFINE_CLASS(overlay_vaapi);
> +
> +static const AVFilterPad overlay_vaapi_inputs[] = {
> +    {
> +        .name             = "main",
> +        .type             = AVMEDIA_TYPE_VIDEO,
> +        .get_video_buffer = get_video_buffer,
> +        .config_props     = &ff_vaapi_vpp_config_input,
> +        .needs_fifo       = 1,
> +    },
> +    {
> +        .name             = "overlay",
> +        .type             = AVMEDIA_TYPE_VIDEO,
> +        .get_video_buffer = get_video_buffer,
> +        .config_props     = &ff_vaapi_vpp_config_input,
> +        .needs_fifo       = 1,
> +    },
> +    { NULL }
> +};
> +
> +static const AVFilterPad overlay_vaapi_outputs[] = {
> +    {
> +        .name          = "default",
> +        .type          = AVMEDIA_TYPE_VIDEO,
> +        .config_props  = &overlay_vaapi_config_output,
> +    },
> +    { NULL }
> +};
> +
> +AVFilter ff_vf_overlay_vaapi = {
> +    .name            = "overlay_vaapi",
> +    .description     = NULL_IF_CONFIG_SMALL("Overlay one video on top of another"),
> +    .priv_size       = sizeof(OverlayVAAPIContext),
> +    .priv_class      = &overlay_vaapi_class,
> +    .init            = &overlay_vaapi_init,
> +    .uninit          = &overlay_vaapi_uninit,
> +    .query_formats   = &overlay_vaapi_query_formats,
> +    .activate        = &overlay_vaapi_activate,
> +    .inputs          = overlay_vaapi_inputs,
> +    .outputs         = overlay_vaapi_outputs,
> +    .flags_internal  = FF_FILTER_FLAG_HWFRAME_AWARE,
> +};
> --
> 2.17.1
>
Zachary Zhou June 19, 2019, 9:26 a.m. UTC | #3
> -----Original Message-----

> From: ffmpeg-devel [mailto:ffmpeg-devel-bounces@ffmpeg.org] On Behalf Of

> mypopy@gmail.com

> Sent: Wednesday, June 19, 2019 10:13 AM

> To: FFmpeg development discussions and patches <ffmpeg-devel@ffmpeg.org>

> Subject: Re: [FFmpeg-devel] [PATCH v2] avfilter/vaapi: add overlay_vaapi filter

> 

>  ?

> On Tue, Jun 11, 2019 at 10:52 AM Zachary Zhou <zachary.zhou@intel.com>

> wrote:

> >

> > ---

> >  configure                      |   1 +

> >  libavfilter/Makefile           |   1 +

> >  libavfilter/allfilters.c       |   1 +

> >  libavfilter/vaapi_vpp.c        |  95 +++++++++

> >  libavfilter/vaapi_vpp.h        |   5 +

> >  libavfilter/vf_overlay_vaapi.c | 357

> > +++++++++++++++++++++++++++++++++

> >  6 files changed, 460 insertions(+)

> >  create mode 100644 libavfilter/vf_overlay_vaapi.c

> >

> > diff --git a/configure b/configure

> > index 32fc26356c..f469e6a3b1 100755

> > --- a/configure

> > +++ b/configure

> > @@ -3478,6 +3478,7 @@ openclsrc_filter_deps="opencl"

> >  overlay_opencl_filter_deps="opencl"

> >  overlay_qsv_filter_deps="libmfx"

> >  overlay_qsv_filter_select="qsvvpp"

> > +overlay_vaapi_filter_deps="vaapi"

> >  owdenoise_filter_deps="gpl"

> >  pan_filter_deps="swresample"

> >  perspective_filter_deps="gpl"

> > diff --git a/libavfilter/Makefile b/libavfilter/Makefile index

> > 07ea8d7edc..5cbf1a7e41 100644

> > --- a/libavfilter/Makefile

> > +++ b/libavfilter/Makefile

> > @@ -311,6 +311,7 @@ OBJS-$(CONFIG_OVERLAY_FILTER)                +=

> vf_overlay.o framesync.o

> >  OBJS-$(CONFIG_OVERLAY_OPENCL_FILTER)         += vf_overlay_opencl.o

> opencl.o \

> >                                                  opencl/overlay.o framesync.o

> >  OBJS-$(CONFIG_OVERLAY_QSV_FILTER)            += vf_overlay_qsv.o

> framesync.o

> > +OBJS-$(CONFIG_OVERLAY_VAAPI_FILTER)          += vf_overlay_vaapi.o

> framesync.o

> >  OBJS-$(CONFIG_OWDENOISE_FILTER)              += vf_owdenoise.o

> >  OBJS-$(CONFIG_PAD_FILTER)                    += vf_pad.o

> >  OBJS-$(CONFIG_PALETTEGEN_FILTER)             += vf_palettegen.o

> > diff --git a/libavfilter/allfilters.c b/libavfilter/allfilters.c index

> > 9c846b1ddd..27ee1df78b 100644

> > --- a/libavfilter/allfilters.c

> > +++ b/libavfilter/allfilters.c

> > @@ -295,6 +295,7 @@ extern AVFilter ff_vf_oscilloscope;  extern

> > AVFilter ff_vf_overlay;  extern AVFilter ff_vf_overlay_opencl;  extern

> > AVFilter ff_vf_overlay_qsv;

> > +extern AVFilter ff_vf_overlay_vaapi;

> >  extern AVFilter ff_vf_owdenoise;

> >  extern AVFilter ff_vf_pad;

> >  extern AVFilter ff_vf_palettegen;

> > diff --git a/libavfilter/vaapi_vpp.c b/libavfilter/vaapi_vpp.c index

> > b5b245c8af..a8caa5b532 100644

> > --- a/libavfilter/vaapi_vpp.c

> > +++ b/libavfilter/vaapi_vpp.c

> > @@ -663,6 +663,101 @@ fail:

> >      return err;

> >  }

> >

> > +int ff_vaapi_vpp_render_overlay(AVFilterContext *avctx,

> > +                                VAProcPipelineParameterBuffer *params,

> > +                                VAProcPipelineParameterBuffer *subpic_params,

> > +                                VASurfaceID output_surface) {

> > +    VABufferID params_id;

> > +    VABufferID subpic_params_id;

> > +    VAStatus vas;

> > +    int err = 0;

> > +    VAAPIVPPContext *ctx   = avctx->priv;

> > +

> > +    vas = vaBeginPicture(ctx->hwctx->display,

> > +                         ctx->va_context, output_surface);

> > +    if (vas != VA_STATUS_SUCCESS) {

> > +        av_log(avctx, AV_LOG_ERROR, "Failed to attach new picture: "

> > +               "%d (%s).\n", vas, vaErrorStr(vas));

> > +        err = AVERROR(EIO);

> > +        goto fail;

> > +    }

> > +

> > +    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,

> > +                         VAProcPipelineParameterBufferType,

> > +                         sizeof(*params), 1, params, &params_id);

> > +    if (vas != VA_STATUS_SUCCESS) {

> > +        av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "

> > +               "%d (%s).\n", vas, vaErrorStr(vas));

> > +        err = AVERROR(EIO);

> > +        goto fail_after_begin;

> > +    }

> > +    av_log(avctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n",

> > +           params_id);

> > +

> > +

> > +    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,

> > +                         VAProcPipelineParameterBufferType,

> > +                         sizeof(*subpic_params), 1, subpic_params,

> &subpic_params_id);

> > +    if (vas != VA_STATUS_SUCCESS) {

> > +        av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "

> > +               "%d (%s).\n", vas, vaErrorStr(vas));

> > +        err = AVERROR(EIO);

> > +        goto fail_after_begin;

> > +    }

> > +    av_log(avctx, AV_LOG_DEBUG, "Pipeline subpic parameter buffer

> is %#x.\n",

> > +           subpic_params_id);

> > +

> > +    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,

> > +                          &params_id, 1);

> > +    if (vas != VA_STATUS_SUCCESS) {

> > +        av_log(avctx, AV_LOG_ERROR, "Failed to render parameter buffer: "

> > +               "%d (%s).\n", vas, vaErrorStr(vas));

> > +        err = AVERROR(EIO);

> > +        goto fail_after_begin;

> > +    }

> > +

> > +    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,

> > +                          &subpic_params_id, 1);

> > +    if (vas != VA_STATUS_SUCCESS) {

> > +        av_log(avctx, AV_LOG_ERROR, "Failed to render subpic parameter

> buffer: "

> > +               "%d (%s).\n", vas, vaErrorStr(vas));

> > +        err = AVERROR(EIO);

> > +        goto fail_after_begin;

> > +    }

> > +

> > +    vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);

> > +    if (vas != VA_STATUS_SUCCESS) {

> > +        av_log(avctx, AV_LOG_ERROR, "Failed to start picture processing: "

> > +               "%d (%s).\n", vas, vaErrorStr(vas));

> > +        err = AVERROR(EIO);

> > +        goto fail_after_render;

> > +    }

> > +

> > +    if (CONFIG_VAAPI_1 || ctx->hwctx->driver_quirks &

> > +        AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS) {

> > +        vas = vaDestroyBuffer(ctx->hwctx->display, params_id);

> > +        if (vas != VA_STATUS_SUCCESS) {

> > +            av_log(avctx, AV_LOG_ERROR, "Failed to free parameter buffer: "

> > +                   "%d (%s).\n", vas, vaErrorStr(vas));

> > +            // And ignore.

> > +        }

> > +    }

> > +

> > +    return 0;

> > +

> > +    // We want to make sure that if vaBeginPicture has been called, we also

> > +    // call vaRenderPicture and vaEndPicture.  These calls may well fail or

> > +    // do something else nasty, but once we're in this failure case there

> > +    // isn't much else we can do.

> > +fail_after_begin:

> > +    vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_id,

> > +1);

> > +fail_after_render:

> > +    vaEndPicture(ctx->hwctx->display, ctx->va_context);

> > +fail:

> > +    return err;

> > +}

> > +

> >  void ff_vaapi_vpp_ctx_init(AVFilterContext *avctx)  {

> >      int i;

> > diff --git a/libavfilter/vaapi_vpp.h b/libavfilter/vaapi_vpp.h index

> > c3da91717c..8d53acdec9 100644

> > --- a/libavfilter/vaapi_vpp.h

> > +++ b/libavfilter/vaapi_vpp.h

> > @@ -83,4 +83,9 @@ int ff_vaapi_vpp_render_picture(AVFilterContext

> *avctx,

> >                                  VAProcPipelineParameterBuffer *params,

> >                                  AVFrame *output_frame);

> >

> > +int ff_vaapi_vpp_render_overlay(AVFilterContext *avctx,

> > +                                VAProcPipelineParameterBuffer *primary_params,

> > +                                VAProcPipelineParameterBuffer *subpic_params,

> > +                                VASurfaceID output_surface);

> > +

> >  #endif /* AVFILTER_VAAPI_VPP_H */

> > diff --git a/libavfilter/vf_overlay_vaapi.c

> > b/libavfilter/vf_overlay_vaapi.c new file mode 100644 index

> > 0000000000..0f72b23611

> > --- /dev/null

> > +++ b/libavfilter/vf_overlay_vaapi.c

> > @@ -0,0 +1,357 @@

> > +/*

> > + * This file is part of FFmpeg.

> > + *

> > + * FFmpeg is free software; you can redistribute it and/or

> > + * modify it under the terms of the GNU Lesser General Public

> > + * License as published by the Free Software Foundation; either

> > + * version 2.1 of the License, or (at your option) any later version.

> > + *

> > + * FFmpeg is distributed in the hope that it will be useful,

> > + * but WITHOUT ANY WARRANTY; without even the implied warranty of

> > + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the

> GNU

> > + * Lesser General Public License for more details.

> > + *

> > + * You should have received a copy of the GNU Lesser General Public

> > + * License along with FFmpeg; if not, write to the Free Software

> > + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA

> > +02110-1301 USA  */ #include <string.h>

> > +

> > +#include "libavutil/avassert.h"

> > +#include "libavutil/mem.h"

> > +#include "libavutil/opt.h"

> > +#include "libavutil/pixdesc.h"

> > +

> > +#include "avfilter.h"

> > +#include "framesync.h"

> > +#include "formats.h"

> > +#include "internal.h"

> > +#include "vaapi_vpp.h"

> > +

> > +#if VA_CHECK_VERSION(2, 1, 0)

> > +// Blend State

> > +static VABlendState blend_state = { /** \brief Video blending flags. */

> > +    .flags        = VA_BLEND_PREMULTIPLIED_ALPHA,

> //VA_BLEND_GLOBAL_ALPHA,

> > +    .global_alpha = 1,

> > +    .min_luma     = 0,

> > +    .max_luma     = 1

> > +};

> > +#endif

> > +

> > +typedef struct OverlayVAAPIContext {

> > +    VAAPIVPPContext  vpp_ctx; // must be the first field

> > +

> > +    FFFrameSync      fs;

> > +

> > +    int              overlay_x;

> > +    int              overlay_y;

> > +    int              overlay_w;

> > +    int              overlay_h;

> > +    float            overlay_alpha;

> > +} OverlayVAAPIContext;

> > +

> > +static int overlay_vaapi_query_formats(AVFilterContext *ctx) {

> > +    int i;

> > +    int ret;

> > +

> > +    static const enum AVPixelFormat main_in_fmts[] = {

> > +        AV_PIX_FMT_NV12,

> > +        AV_PIX_FMT_RGB32,

> > +        AV_PIX_FMT_VAAPI,

> > +        AV_PIX_FMT_NONE

> > +    };

> > +    static const enum AVPixelFormat out_pix_fmts[] = {

> > +        AV_PIX_FMT_NV12,

> > +        AV_PIX_FMT_RGB32,

> > +        AV_PIX_FMT_VAAPI,

> > +        AV_PIX_FMT_NONE

> > +    };

> > +

> > +    for (i = 0; i < ctx->nb_inputs; i++) {

> > +        ret = ff_formats_ref(ff_make_format_list(main_in_fmts), &ctx-

> >inputs[i]->out_formats);

> > +        if (ret < 0)

> > +            return ret;

> > +    }

> > +

> > +    ret = ff_formats_ref(ff_make_format_list(out_pix_fmts), &ctx-

> >outputs[0]->in_formats);

> > +    if (ret < 0)

> > +        return ret;

> > +

> > +    return 0;

> > +}

> > +

> > +static int overlay_vaapi_blend(FFFrameSync *fs) {

> > +    AVFilterContext    *avctx = fs->parent;

> > +    AVFilterLink     *outlink = avctx->outputs[0];

> > +    OverlayVAAPIContext *ctx  = avctx->priv;

> > +    VAAPIVPPContext *vpp_ctx  = avctx->priv;

> > +    AVFrame *input_main, *input_overlay;

> > +    AVFrame *output;

> > +    int err;

> > +    VASurfaceID main_surface, overlay_surface, output_surface;

> > +    VARectangle main_region, overlay_region, output_region;

> > +    VAProcPipelineParameterBuffer params, subpic_params;

> > +

> > +    err = ff_framesync_get_frame(fs, 0, &input_main, 0);

> > +    if (err < 0)

> > +        return err;

> > +    err = ff_framesync_get_frame(fs, 1, &input_overlay, 0);

> > +    if (err < 0)

> > +        return err;

> > +

> > +    av_log(avctx, AV_LOG_DEBUG, "Filter main: %s, %ux%u (%"PRId64").\n",

> > +           av_get_pix_fmt_name(input_main->format),

> > +           input_main->width, input_main->height, input_main->pts);

> > +

> > +    av_log(avctx, AV_LOG_DEBUG, "Filter overlay: %s, %ux%u

> (%"PRId64").\n",

> > +           av_get_pix_fmt_name(input_overlay->format),

> > +           input_overlay->width, input_overlay->height,

> > + input_overlay->pts);

> > +

> > +    if (vpp_ctx->va_context == VA_INVALID_ID)

> > +        return AVERROR(EINVAL);

> > +

> > +    main_surface = (VASurfaceID)(uintptr_t)input_main->data[3];

> > +    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for overlay vpp

> main.\n",

> > +           main_surface);

> > +

> > +    overlay_surface = (VASurfaceID)(uintptr_t)input_overlay->data[3];

> > +    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for overlay vpp

> overlay.\n",

> > +           overlay_surface);

> > +

> > +    output = ff_get_video_buffer(outlink, outlink->w, outlink->h);

> > +    if (!output) {

> > +        err = AVERROR(ENOMEM);

> > +        goto fail;

> > +    }

> > +

> > +    output_surface = (VASurfaceID)(uintptr_t)output->data[3];

> > +    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for overlay vpp

> output.\n",

> > +           output_surface);

> > +

> > +    memset(&params, 0, sizeof(params));

> > +    memset(&subpic_params, 0, sizeof(subpic_params));

> > +

> > +    main_region = (VARectangle) {

> > +        .x      = 0,

> > +        .y      = 0,

> > +        .width  = input_main->width,

> > +        .height = input_main->height,

> > +    };

> > +

> > +    overlay_region = (VARectangle) {

> > +        .x      = ctx->overlay_x,

> > +        .y      = ctx->overlay_y,

> > +        .width  = input_overlay->width,

> > +        .height = input_overlay->height,

> > +    };

> > +

> > +    output_region = (VARectangle) {

> > +        .x      = 0,

> > +        .y      = 0,

> > +        .width  = output->width,

> > +        .height = output->height,

> > +    };

> > +

> Some question, if overlay_width > main_width && overlay_height >

> main_height, what's the output in this filter?


Thank you for the comments.
In this case, if we don't reset overlay_width and overlay_height via the parameters (w and h), the UMD will fail in EndPicture.

Are you suggesting that we add a check to avoid this?

> 

> How about overlay_x > main_width && overlay_y > main_height?

Only the main surface will be output in this case.

> 

> > +    if (!ctx->overlay_w && !ctx->overlay_h) {

> > +        overlay_region.width = ctx->overlay_w;

> > +        overlay_region.height = ctx->overlay_h;

> > +    }

> > +

> > +    params.filters     = &vpp_ctx->filter_buffers[0];

> > +    params.num_filters = vpp_ctx->nb_filter_buffers;

> > +

> > +    params.surface = main_surface;

> > +    params.surface_region = &main_region;

> > +

> > +    switch (input_main->colorspace) {

> > +    case AVCOL_SPC_BT470BG:

> > +        params.surface_color_standard = VAProcColorStandardBT601;

> > +        break;

> > +    case AVCOL_SPC_BT709:

> > +        params.surface_color_standard = VAProcColorStandardBT709;

> > +        break;

> > +    default:

> > +        params.surface_color_standard = VAProcColorStandardNone;

> > +        break;

> > +    }

> > +

> > +    params.output_region = &output_region;

> > +    params.output_background_color = VAAPI_VPP_BACKGROUND_BLACK;

> > +    params.output_color_standard = params.surface_color_standard;

> > +

> > +    memcpy(&subpic_params, &params, sizeof(subpic_params));

> > +    subpic_params.pipeline_flags |= VA_PROC_PIPELINE_FAST;

> > +    subpic_params.filter_flags |= VA_FILTER_SCALING_FAST;

> > +

> > +#if VA_CHECK_VERSION(2, 1, 0)

> > +    blend_state.global_alpha = ctx->overlay_alpha;

> > +    subpic_params.blend_state = &blend_state; #endif

> > +

> > +    subpic_params.surface = overlay_surface;

> > +    subpic_params.output_region = &overlay_region;

> > +

> > +    err = ff_vaapi_vpp_render_overlay(avctx, &params, &subpic_params,

> output_surface);

> > +    if (err < 0)

> > +        goto fail;

> > +

> > +    err = av_frame_copy_props(output, input_main);

> > +    if (err < 0)

> > +        goto fail;

> > +

> > +    av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u

> (%"PRId64").\n",

> > +           av_get_pix_fmt_name(output->format),

> > +           output->width, output->height, output->pts);

> > +

> > +    return ff_filter_frame(outlink, output);

> > +

> > +fail:

> > +    av_frame_free(&output);

> > +    return err;

> > +}

> > +

> > +static int overlay_vaapi_init_framesync(AVFilterContext *avctx) {

> > +    OverlayVAAPIContext *ctx = avctx->priv;

> > +    int ret, i;

> > +

> > +    ctx->fs.on_event = overlay_vaapi_blend;

> > +    ctx->fs.opaque   = ctx;

> > +    ret = ff_framesync_init(&ctx->fs, avctx, avctx->nb_inputs);

> > +    if (ret < 0)

> > +        return ret;

> > +

> > +    for (i = 0; i < avctx->nb_inputs; i++) {

> > +        FFFrameSyncIn *in = &ctx->fs.in[i];

> > +        in->before    = EXT_STOP;

> > +        in->after     = EXT_INFINITY;

> > +        in->sync      = i ? 1 : 2;

> > +        in->time_base = avctx->inputs[i]->time_base;

> > +    }

> > +

> > +    return ff_framesync_configure(&ctx->fs); }

> > +

> > +static int overlay_vaapi_config_output(AVFilterLink *outlink) {

> > +    AVFilterContext  *avctx  = outlink->src;

> > +    OverlayVAAPIContext *ctx = avctx->priv;

> > +    VAAPIVPPContext *vpp_ctx = avctx->priv;

> > +    AVFilterLink        *in0 = avctx->inputs[0];

> > +    AVFilterLink        *in1 = avctx->inputs[1];

> > +    int err;

> > +

> > +    av_log(ctx, AV_LOG_DEBUG, "Output is of %s.\n",

> av_get_pix_fmt_name(outlink->format));

> > +    if ((in0->format == AV_PIX_FMT_VAAPI && in1->format !=

> AV_PIX_FMT_VAAPI) ||

> > +        (in0->format != AV_PIX_FMT_VAAPI && in1->format ==

> AV_PIX_FMT_VAAPI)) {

> > +        av_log(ctx, AV_LOG_ERROR, "Mixing hardware and software pixel

> formats is not supported.\n");

> > +        return AVERROR(EINVAL);

> > +    }

> > +

> > +    err = overlay_vaapi_init_framesync(avctx);

> > +    if (err < 0)

> > +        return err;

> > +

> > +    vpp_ctx->output_width  = avctx->inputs[0]->w;

> > +    vpp_ctx->output_height = avctx->inputs[0]->h;

> > +

> > +    err = ff_vaapi_vpp_config_output(outlink);

> > +    if (err < 0)

> > +        return err;

> > +

> > +    err = ff_framesync_init_dualinput(&ctx->fs, avctx);

> > +    if (err < 0)

> > +        return err;

> > +

> > +    return ff_framesync_configure(&ctx->fs); }

> > +

> > +static av_cold int overlay_vaapi_init(AVFilterContext *avctx) {

> > +    VAAPIVPPContext *vpp_ctx = avctx->priv;

> > +

> > +    ff_vaapi_vpp_ctx_init(avctx);

> > +    vpp_ctx->output_format = AV_PIX_FMT_NONE;

> > +

> > +    return 0;

> > +}

> > +

> > +static int overlay_vaapi_activate(AVFilterContext *avctx) {

> > +    OverlayVAAPIContext *ctx = avctx->priv;

> > +

> > +    return ff_framesync_activate(&ctx->fs); }

> > +

> > +static av_cold void overlay_vaapi_uninit(AVFilterContext *avctx) {

> > +    OverlayVAAPIContext *ctx = avctx->priv;

> > +

> > +    ff_framesync_uninit(&ctx->fs);

> > +}

> > +

> > +static AVFrame *get_video_buffer(AVFilterLink *inlink, int w, int h)

> > +{

> > +    return ff_default_get_video_buffer(inlink, w, h); }

> > +

> > +#define OFFSET(x) offsetof(OverlayVAAPIContext, x) #define FLAGS

> > +(AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM) static

> const

> > +AVOption overlay_vaapi_options[] = {

> > +    { "x", "Overlay x position",

> > +      OFFSET(overlay_x), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags =

> FLAGS },

> > +    { "y", "Overlay y position",

> > +      OFFSET(overlay_y), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags =

> FLAGS },

> > +    { "w", "Overlay width",

> > +      OFFSET(overlay_w), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags =

> FLAGS },

> > +    { "h", "Overlay hight",

> > +      OFFSET(overlay_h), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags =

> FLAGS },

> > +    { "alpha", "Overlay global alpha",

> > +      OFFSET(overlay_alpha), AV_OPT_TYPE_FLOAT, { .dbl = 0.0}, 0.0,

> 1.0, .flags = FLAGS},

> > +    { NULL },

> > +};

> > +

> > +AVFILTER_DEFINE_CLASS(overlay_vaapi);

> > +

> > +static const AVFilterPad overlay_vaapi_inputs[] = {

> > +    {

> > +        .name             = "main",

> > +        .type             = AVMEDIA_TYPE_VIDEO,

> > +        .get_video_buffer = get_video_buffer,

> > +        .config_props     = &ff_vaapi_vpp_config_input,

> > +        .needs_fifo       = 1,

> > +    },

> > +    {

> > +        .name             = "overlay",

> > +        .type             = AVMEDIA_TYPE_VIDEO,

> > +        .get_video_buffer = get_video_buffer,

> > +        .config_props     = &ff_vaapi_vpp_config_input,

> > +        .needs_fifo       = 1,

> > +    },

> > +    { NULL }

> > +};

> > +

> > +static const AVFilterPad overlay_vaapi_outputs[] = {

> > +    {

> > +        .name          = "default",

> > +        .type          = AVMEDIA_TYPE_VIDEO,

> > +        .config_props  = &overlay_vaapi_config_output,

> > +    },

> > +    { NULL }

> > +};

> > +

> > +AVFilter ff_vf_overlay_vaapi = {

> > +    .name            = "overlay_vaapi",

> > +    .description     = NULL_IF_CONFIG_SMALL("Overlay one video on top of

> another"),

> > +    .priv_size       = sizeof(OverlayVAAPIContext),

> > +    .priv_class      = &overlay_vaapi_class,

> > +    .init            = &overlay_vaapi_init,

> > +    .uninit          = &overlay_vaapi_uninit,

> > +    .query_formats   = &overlay_vaapi_query_formats,

> > +    .activate        = &overlay_vaapi_activate,

> > +    .inputs          = overlay_vaapi_inputs,

> > +    .outputs         = overlay_vaapi_outputs,

> > +    .flags_internal  = FF_FILTER_FLAG_HWFRAME_AWARE, };

> > --

> > 2.17.1

> >

> _______________________________________________

> ffmpeg-devel mailing list

> ffmpeg-devel@ffmpeg.org

> https://ffmpeg.org/mailman/listinfo/ffmpeg-devel

> 

> To unsubscribe, visit link above, or email ffmpeg-devel-request@ffmpeg.org

> with subject "unsubscribe".
Jun Zhao June 19, 2019, 1:21 p.m. UTC | #4
On Wed, Jun 19, 2019 at 5:26 PM Zhou, Zachary <zachary.zhou@intel.com> wrote:
>
>
>
> > -----Original Message-----
> > From: ffmpeg-devel [mailto:ffmpeg-devel-bounces@ffmpeg.org] On Behalf Of
> > mypopy@gmail.com
> > Sent: Wednesday, June 19, 2019 10:13 AM
> > To: FFmpeg development discussions and patches <ffmpeg-devel@ffmpeg.org>
> > Subject: Re: [FFmpeg-devel] [PATCH v2] avfilter/vaapi: add overlay_vaapi filter
> >
> >  ?
> > On Tue, Jun 11, 2019 at 10:52 AM Zachary Zhou <zachary.zhou@intel.com>
> > wrote:
> > >
> > > ---
> > >  configure                      |   1 +
> > >  libavfilter/Makefile           |   1 +
> > >  libavfilter/allfilters.c       |   1 +
> > >  libavfilter/vaapi_vpp.c        |  95 +++++++++
> > >  libavfilter/vaapi_vpp.h        |   5 +
> > >  libavfilter/vf_overlay_vaapi.c | 357
> > > +++++++++++++++++++++++++++++++++
> > >  6 files changed, 460 insertions(+)
> > >  create mode 100644 libavfilter/vf_overlay_vaapi.c
> > >
> > > diff --git a/configure b/configure
> > > index 32fc26356c..f469e6a3b1 100755
> > > --- a/configure
> > > +++ b/configure
> > > @@ -3478,6 +3478,7 @@ openclsrc_filter_deps="opencl"
> > >  overlay_opencl_filter_deps="opencl"
> > >  overlay_qsv_filter_deps="libmfx"
> > >  overlay_qsv_filter_select="qsvvpp"
> > > +overlay_vaapi_filter_deps="vaapi"
> > >  owdenoise_filter_deps="gpl"
> > >  pan_filter_deps="swresample"
> > >  perspective_filter_deps="gpl"
> > > diff --git a/libavfilter/Makefile b/libavfilter/Makefile index
> > > 07ea8d7edc..5cbf1a7e41 100644
> > > --- a/libavfilter/Makefile
> > > +++ b/libavfilter/Makefile
> > > @@ -311,6 +311,7 @@ OBJS-$(CONFIG_OVERLAY_FILTER)                +=
> > vf_overlay.o framesync.o
> > >  OBJS-$(CONFIG_OVERLAY_OPENCL_FILTER)         += vf_overlay_opencl.o
> > opencl.o \
> > >                                                  opencl/overlay.o framesync.o
> > >  OBJS-$(CONFIG_OVERLAY_QSV_FILTER)            += vf_overlay_qsv.o
> > framesync.o
> > > +OBJS-$(CONFIG_OVERLAY_VAAPI_FILTER)          += vf_overlay_vaapi.o
> > framesync.o
> > >  OBJS-$(CONFIG_OWDENOISE_FILTER)              += vf_owdenoise.o
> > >  OBJS-$(CONFIG_PAD_FILTER)                    += vf_pad.o
> > >  OBJS-$(CONFIG_PALETTEGEN_FILTER)             += vf_palettegen.o
> > > diff --git a/libavfilter/allfilters.c b/libavfilter/allfilters.c index
> > > 9c846b1ddd..27ee1df78b 100644
> > > --- a/libavfilter/allfilters.c
> > > +++ b/libavfilter/allfilters.c
> > > @@ -295,6 +295,7 @@ extern AVFilter ff_vf_oscilloscope;  extern
> > > AVFilter ff_vf_overlay;  extern AVFilter ff_vf_overlay_opencl;  extern
> > > AVFilter ff_vf_overlay_qsv;
> > > +extern AVFilter ff_vf_overlay_vaapi;
> > >  extern AVFilter ff_vf_owdenoise;
> > >  extern AVFilter ff_vf_pad;
> > >  extern AVFilter ff_vf_palettegen;
> > > diff --git a/libavfilter/vaapi_vpp.c b/libavfilter/vaapi_vpp.c index
> > > b5b245c8af..a8caa5b532 100644
> > > --- a/libavfilter/vaapi_vpp.c
> > > +++ b/libavfilter/vaapi_vpp.c
> > > @@ -663,6 +663,101 @@ fail:
> > >      return err;
> > >  }
> > >
> > > +int ff_vaapi_vpp_render_overlay(AVFilterContext *avctx,
> > > +                                VAProcPipelineParameterBuffer *params,
> > > +                                VAProcPipelineParameterBuffer *subpic_params,
> > > +                                VASurfaceID output_surface) {
> > > +    VABufferID params_id;
> > > +    VABufferID subpic_params_id;
> > > +    VAStatus vas;
> > > +    int err = 0;
> > > +    VAAPIVPPContext *ctx   = avctx->priv;
> > > +
> > > +    vas = vaBeginPicture(ctx->hwctx->display,
> > > +                         ctx->va_context, output_surface);
> > > +    if (vas != VA_STATUS_SUCCESS) {
> > > +        av_log(avctx, AV_LOG_ERROR, "Failed to attach new picture: "
> > > +               "%d (%s).\n", vas, vaErrorStr(vas));
> > > +        err = AVERROR(EIO);
> > > +        goto fail;
> > > +    }
> > > +
> > > +    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
> > > +                         VAProcPipelineParameterBufferType,
> > > +                         sizeof(*params), 1, params, &params_id);
> > > +    if (vas != VA_STATUS_SUCCESS) {
> > > +        av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
> > > +               "%d (%s).\n", vas, vaErrorStr(vas));
> > > +        err = AVERROR(EIO);
> > > +        goto fail_after_begin;
> > > +    }
> > > +    av_log(avctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n",
> > > +           params_id);
> > > +
> > > +
> > > +    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
> > > +                         VAProcPipelineParameterBufferType,
> > > +                         sizeof(*subpic_params), 1, subpic_params,
> > &subpic_params_id);
> > > +    if (vas != VA_STATUS_SUCCESS) {
> > > +        av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
> > > +               "%d (%s).\n", vas, vaErrorStr(vas));
> > > +        err = AVERROR(EIO);
> > > +        goto fail_after_begin;
> > > +    }
> > > +    av_log(avctx, AV_LOG_DEBUG, "Pipeline subpic parameter buffer
> > is %#x.\n",
> > > +           subpic_params_id);
> > > +
> > > +    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
> > > +                          &params_id, 1);
> > > +    if (vas != VA_STATUS_SUCCESS) {
> > > +        av_log(avctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
> > > +               "%d (%s).\n", vas, vaErrorStr(vas));
> > > +        err = AVERROR(EIO);
> > > +        goto fail_after_begin;
> > > +    }
> > > +
> > > +    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
> > > +                          &subpic_params_id, 1);
> > > +    if (vas != VA_STATUS_SUCCESS) {
> > > +        av_log(avctx, AV_LOG_ERROR, "Failed to render subpic parameter
> > buffer: "
> > > +               "%d (%s).\n", vas, vaErrorStr(vas));
> > > +        err = AVERROR(EIO);
> > > +        goto fail_after_begin;
> > > +    }
> > > +
> > > +    vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
> > > +    if (vas != VA_STATUS_SUCCESS) {
> > > +        av_log(avctx, AV_LOG_ERROR, "Failed to start picture processing: "
> > > +               "%d (%s).\n", vas, vaErrorStr(vas));
> > > +        err = AVERROR(EIO);
> > > +        goto fail_after_render;
> > > +    }
> > > +
> > > +    if (CONFIG_VAAPI_1 || ctx->hwctx->driver_quirks &
> > > +        AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS) {
> > > +        vas = vaDestroyBuffer(ctx->hwctx->display, params_id);
> > > +        if (vas != VA_STATUS_SUCCESS) {
> > > +            av_log(avctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
> > > +                   "%d (%s).\n", vas, vaErrorStr(vas));
> > > +            // And ignore.
> > > +        }
> > > +    }
> > > +
> > > +    return 0;
> > > +
> > > +    // We want to make sure that if vaBeginPicture has been called, we also
> > > +    // call vaRenderPicture and vaEndPicture.  These calls may well fail or
> > > +    // do something else nasty, but once we're in this failure case there
> > > +    // isn't much else we can do.
> > > +fail_after_begin:
> > > +    vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_id,
> > > +1);
> > > +fail_after_render:
> > > +    vaEndPicture(ctx->hwctx->display, ctx->va_context);
> > > +fail:
> > > +    return err;
> > > +}
> > > +
> > >  void ff_vaapi_vpp_ctx_init(AVFilterContext *avctx)  {
> > >      int i;
> > > diff --git a/libavfilter/vaapi_vpp.h b/libavfilter/vaapi_vpp.h index
> > > c3da91717c..8d53acdec9 100644
> > > --- a/libavfilter/vaapi_vpp.h
> > > +++ b/libavfilter/vaapi_vpp.h
> > > @@ -83,4 +83,9 @@ int ff_vaapi_vpp_render_picture(AVFilterContext
> > *avctx,
> > >                                  VAProcPipelineParameterBuffer *params,
> > >                                  AVFrame *output_frame);
> > >
> > > +int ff_vaapi_vpp_render_overlay(AVFilterContext *avctx,
> > > +                                VAProcPipelineParameterBuffer *primary_params,
> > > +                                VAProcPipelineParameterBuffer *subpic_params,
> > > +                                VASurfaceID output_surface);
> > > +
> > >  #endif /* AVFILTER_VAAPI_VPP_H */
> > > diff --git a/libavfilter/vf_overlay_vaapi.c
> > > b/libavfilter/vf_overlay_vaapi.c new file mode 100644 index
> > > 0000000000..0f72b23611
> > > --- /dev/null
> > > +++ b/libavfilter/vf_overlay_vaapi.c
> > > @@ -0,0 +1,357 @@
> > > +/*
> > > + * This file is part of FFmpeg.
> > > + *
> > > + * FFmpeg is free software; you can redistribute it and/or
> > > + * modify it under the terms of the GNU Lesser General Public
> > > + * License as published by the Free Software Foundation; either
> > > + * version 2.1 of the License, or (at your option) any later version.
> > > + *
> > > + * FFmpeg is distributed in the hope that it will be useful,
> > > + * but WITHOUT ANY WARRANTY; without even the implied warranty of
> > > + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
> > GNU
> > > + * Lesser General Public License for more details.
> > > + *
> > > + * You should have received a copy of the GNU Lesser General Public
> > > + * License along with FFmpeg; if not, write to the Free Software
> > > + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
> > > +02110-1301 USA  */ #include <string.h>
> > > +
> > > +#include "libavutil/avassert.h"
> > > +#include "libavutil/mem.h"
> > > +#include "libavutil/opt.h"
> > > +#include "libavutil/pixdesc.h"
> > > +
> > > +#include "avfilter.h"
> > > +#include "framesync.h"
> > > +#include "formats.h"
> > > +#include "internal.h"
> > > +#include "vaapi_vpp.h"
> > > +
> > > +#if VA_CHECK_VERSION(2, 1, 0)
> > > +// Blend State
> > > +static VABlendState blend_state = { /** \brief Video blending flags. */
> > > +    .flags        = VA_BLEND_PREMULTIPLIED_ALPHA,
> > //VA_BLEND_GLOBAL_ALPHA,
> > > +    .global_alpha = 1,
> > > +    .min_luma     = 0,
> > > +    .max_luma     = 1
> > > +};
> > > +#endif
> > > +
> > > +typedef struct OverlayVAAPIContext {
> > > +    VAAPIVPPContext  vpp_ctx; // must be the first field
> > > +
> > > +    FFFrameSync      fs;
> > > +
> > > +    int              overlay_x;
> > > +    int              overlay_y;
> > > +    int              overlay_w;
> > > +    int              overlay_h;
> > > +    float            overlay_alpha;
> > > +} OverlayVAAPIContext;
> > > +
> > > +static int overlay_vaapi_query_formats(AVFilterContext *ctx) {
> > > +    int i;
> > > +    int ret;
> > > +
> > > +    static const enum AVPixelFormat main_in_fmts[] = {
> > > +        AV_PIX_FMT_NV12,
> > > +        AV_PIX_FMT_RGB32,
> > > +        AV_PIX_FMT_VAAPI,
> > > +        AV_PIX_FMT_NONE
> > > +    };
> > > +    static const enum AVPixelFormat out_pix_fmts[] = {
> > > +        AV_PIX_FMT_NV12,
> > > +        AV_PIX_FMT_RGB32,
> > > +        AV_PIX_FMT_VAAPI,
> > > +        AV_PIX_FMT_NONE
> > > +    };
> > > +
> > > +    for (i = 0; i < ctx->nb_inputs; i++) {
> > > +        ret = ff_formats_ref(ff_make_format_list(main_in_fmts), &ctx-
> > >inputs[i]->out_formats);
> > > +        if (ret < 0)
> > > +            return ret;
> > > +    }
> > > +
> > > +    ret = ff_formats_ref(ff_make_format_list(out_pix_fmts), &ctx-
> > >outputs[0]->in_formats);
> > > +    if (ret < 0)
> > > +        return ret;
> > > +
> > > +    return 0;
> > > +}
> > > +
> > > +static int overlay_vaapi_blend(FFFrameSync *fs) {
> > > +    AVFilterContext    *avctx = fs->parent;
> > > +    AVFilterLink     *outlink = avctx->outputs[0];
> > > +    OverlayVAAPIContext *ctx  = avctx->priv;
> > > +    VAAPIVPPContext *vpp_ctx  = avctx->priv;
> > > +    AVFrame *input_main, *input_overlay;
> > > +    AVFrame *output;
> > > +    int err;
> > > +    VASurfaceID main_surface, overlay_surface, output_surface;
> > > +    VARectangle main_region, overlay_region, output_region;
> > > +    VAProcPipelineParameterBuffer params, subpic_params;
> > > +
> > > +    err = ff_framesync_get_frame(fs, 0, &input_main, 0);
> > > +    if (err < 0)
> > > +        return err;
> > > +    err = ff_framesync_get_frame(fs, 1, &input_overlay, 0);
> > > +    if (err < 0)
> > > +        return err;
> > > +
> > > +    av_log(avctx, AV_LOG_DEBUG, "Filter main: %s, %ux%u (%"PRId64").\n",
> > > +           av_get_pix_fmt_name(input_main->format),
> > > +           input_main->width, input_main->height, input_main->pts);
> > > +
> > > +    av_log(avctx, AV_LOG_DEBUG, "Filter overlay: %s, %ux%u
> > (%"PRId64").\n",
> > > +           av_get_pix_fmt_name(input_overlay->format),
> > > +           input_overlay->width, input_overlay->height,
> > > + input_overlay->pts);
> > > +
> > > +    if (vpp_ctx->va_context == VA_INVALID_ID)
> > > +        return AVERROR(EINVAL);
> > > +
> > > +    main_surface = (VASurfaceID)(uintptr_t)input_main->data[3];
> > > +    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for overlay vpp
> > main.\n",
> > > +           main_surface);
> > > +
> > > +    overlay_surface = (VASurfaceID)(uintptr_t)input_overlay->data[3];
> > > +    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for overlay vpp
> > overlay.\n",
> > > +           overlay_surface);
> > > +
> > > +    output = ff_get_video_buffer(outlink, outlink->w, outlink->h);
> > > +    if (!output) {
> > > +        err = AVERROR(ENOMEM);
> > > +        goto fail;
> > > +    }
> > > +
> > > +    output_surface = (VASurfaceID)(uintptr_t)output->data[3];
> > > +    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for overlay vpp
> > output.\n",
> > > +           output_surface);
> > > +
> > > +    memset(&params, 0, sizeof(params));
> > > +    memset(&subpic_params, 0, sizeof(subpic_params));
> > > +
> > > +    main_region = (VARectangle) {
> > > +        .x      = 0,
> > > +        .y      = 0,
> > > +        .width  = input_main->width,
> > > +        .height = input_main->height,
> > > +    };
> > > +
> > > +    overlay_region = (VARectangle) {
> > > +        .x      = ctx->overlay_x,
> > > +        .y      = ctx->overlay_y,
> > > +        .width  = input_overlay->width,
> > > +        .height = input_overlay->height,
> > > +    };
> > > +
> > > +    output_region = (VARectangle) {
> > > +        .x      = 0,
> > > +        .y      = 0,
> > > +        .width  = output->width,
> > > +        .height = output->height,
> > > +    };
> > > +
> > Some question, if overlay_width > main_width && overlay_height >
> > main_height, what's the output in this filter?
>
> Thank you for the comments.
> in this case, if don't reset overlay_width and overlay_height via parameter (w and h), UMD will failed to do EndPicture.
>
> are you suggesting to add check to avoid this ?
>
I think both cases need to be supported.
> >
> > How about overlay_x > main_width && overlay_y > main_height?
> only main surface will be output in this occasion.
>
> >
> > > +    if (!ctx->overlay_w && !ctx->overlay_h) {
> > > +        overlay_region.width = ctx->overlay_w;
> > > +        overlay_region.height = ctx->overlay_h;
> > > +    }
> > > +
> > > +    params.filters     = &vpp_ctx->filter_buffers[0];
> > > +    params.num_filters = vpp_ctx->nb_filter_buffers;
> > > +
> > > +    params.surface = main_surface;
> > > +    params.surface_region = &main_region;
> > > +
> > > +    switch (input_main->colorspace) {
> > > +    case AVCOL_SPC_BT470BG:
> > > +        params.surface_color_standard = VAProcColorStandardBT601;
> > > +        break;
> > > +    case AVCOL_SPC_BT709:
> > > +        params.surface_color_standard = VAProcColorStandardBT709;
> > > +        break;
> > > +    default:
> > > +        params.surface_color_standard = VAProcColorStandardNone;
> > > +        break;
> > > +    }
> > > +
> > > +    params.output_region = &output_region;
> > > +    params.output_background_color = VAAPI_VPP_BACKGROUND_BLACK;
> > > +    params.output_color_standard = params.surface_color_standard;
> > > +
> > > +    memcpy(&subpic_params, &params, sizeof(subpic_params));
> > > +    subpic_params.pipeline_flags |= VA_PROC_PIPELINE_FAST;
> > > +    subpic_params.filter_flags |= VA_FILTER_SCALING_FAST;
> > > +
> > > +#if VA_CHECK_VERSION(2, 1, 0)
> > > +    blend_state.global_alpha = ctx->overlay_alpha;
> > > +    subpic_params.blend_state = &blend_state; #endif
> > > +
> > > +    subpic_params.surface = overlay_surface;
> > > +    subpic_params.output_region = &overlay_region;
> > > +
> > > +    err = ff_vaapi_vpp_render_overlay(avctx, &params, &subpic_params,
> > output_surface);
> > > +    if (err < 0)
> > > +        goto fail;
> > > +
> > > +    err = av_frame_copy_props(output, input_main);
> > > +    if (err < 0)
> > > +        goto fail;
> > > +
> > > +    av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u
> > (%"PRId64").\n",
> > > +           av_get_pix_fmt_name(output->format),
> > > +           output->width, output->height, output->pts);
> > > +
> > > +    return ff_filter_frame(outlink, output);
> > > +
> > > +fail:
> > > +    av_frame_free(&output);
> > > +    return err;
> > > +}
> > > +
> > > +static int overlay_vaapi_init_framesync(AVFilterContext *avctx) {
> > > +    OverlayVAAPIContext *ctx = avctx->priv;
> > > +    int ret, i;
> > > +
> > > +    ctx->fs.on_event = overlay_vaapi_blend;
> > > +    ctx->fs.opaque   = ctx;
> > > +    ret = ff_framesync_init(&ctx->fs, avctx, avctx->nb_inputs);
> > > +    if (ret < 0)
> > > +        return ret;
> > > +
> > > +    for (i = 0; i < avctx->nb_inputs; i++) {
> > > +        FFFrameSyncIn *in = &ctx->fs.in[i];
> > > +        in->before    = EXT_STOP;
> > > +        in->after     = EXT_INFINITY;
> > > +        in->sync      = i ? 1 : 2;
> > > +        in->time_base = avctx->inputs[i]->time_base;
> > > +    }
> > > +
> > > +    return ff_framesync_configure(&ctx->fs); }
> > > +
> > > +static int overlay_vaapi_config_output(AVFilterLink *outlink) {
> > > +    AVFilterContext  *avctx  = outlink->src;
> > > +    OverlayVAAPIContext *ctx = avctx->priv;
> > > +    VAAPIVPPContext *vpp_ctx = avctx->priv;
> > > +    AVFilterLink        *in0 = avctx->inputs[0];
> > > +    AVFilterLink        *in1 = avctx->inputs[1];
> > > +    int err;
> > > +
> > > +    av_log(ctx, AV_LOG_DEBUG, "Output is of %s.\n",
> > av_get_pix_fmt_name(outlink->format));
> > > +    if ((in0->format == AV_PIX_FMT_VAAPI && in1->format !=
> > AV_PIX_FMT_VAAPI) ||
> > > +        (in0->format != AV_PIX_FMT_VAAPI && in1->format ==
> > AV_PIX_FMT_VAAPI)) {
> > > +        av_log(ctx, AV_LOG_ERROR, "Mixing hardware and software pixel
> > formats is not supported.\n");
> > > +        return AVERROR(EINVAL);
> > > +    }
> > > +
> > > +    err = overlay_vaapi_init_framesync(avctx);
> > > +    if (err < 0)
> > > +        return err;
> > > +
> > > +    vpp_ctx->output_width  = avctx->inputs[0]->w;
> > > +    vpp_ctx->output_height = avctx->inputs[0]->h;
> > > +
> > > +    err = ff_vaapi_vpp_config_output(outlink);
> > > +    if (err < 0)
> > > +        return err;
> > > +
> > > +    err = ff_framesync_init_dualinput(&ctx->fs, avctx);
> > > +    if (err < 0)
> > > +        return err;
> > > +
> > > +    return ff_framesync_configure(&ctx->fs); }
> > > +
> > > +static av_cold int overlay_vaapi_init(AVFilterContext *avctx) {
> > > +    VAAPIVPPContext *vpp_ctx = avctx->priv;
> > > +
> > > +    ff_vaapi_vpp_ctx_init(avctx);
> > > +    vpp_ctx->output_format = AV_PIX_FMT_NONE;
> > > +
> > > +    return 0;
> > > +}
> > > +
> > > +static int overlay_vaapi_activate(AVFilterContext *avctx) {
> > > +    OverlayVAAPIContext *ctx = avctx->priv;
> > > +
> > > +    return ff_framesync_activate(&ctx->fs); }
> > > +
> > > +static av_cold void overlay_vaapi_uninit(AVFilterContext *avctx) {
> > > +    OverlayVAAPIContext *ctx = avctx->priv;
> > > +
> > > +    ff_framesync_uninit(&ctx->fs);
> > > +}
> > > +
> > > +static AVFrame *get_video_buffer(AVFilterLink *inlink, int w, int h)
> > > +{
> > > +    return ff_default_get_video_buffer(inlink, w, h); }
> > > +
> > > +#define OFFSET(x) offsetof(OverlayVAAPIContext, x) #define FLAGS
> > > +(AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM) static
> > const
> > > +AVOption overlay_vaapi_options[] = {
> > > +    { "x", "Overlay x position",
> > > +      OFFSET(overlay_x), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags =
> > FLAGS },
> > > +    { "y", "Overlay y position",
> > > +      OFFSET(overlay_y), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags =
> > FLAGS },
> > > +    { "w", "Overlay width",
> > > +      OFFSET(overlay_w), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags =
> > FLAGS },
> > > +    { "h", "Overlay hight",
> > > +      OFFSET(overlay_h), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags =
> > FLAGS },
> > > +    { "alpha", "Overlay global alpha",
> > > +      OFFSET(overlay_alpha), AV_OPT_TYPE_FLOAT, { .dbl = 0.0}, 0.0,
> > 1.0, .flags = FLAGS},
> > > +    { NULL },
> > > +};
> > > +
> > > +AVFILTER_DEFINE_CLASS(overlay_vaapi);
> > > +
> > > +static const AVFilterPad overlay_vaapi_inputs[] = {
> > > +    {
> > > +        .name             = "main",
> > > +        .type             = AVMEDIA_TYPE_VIDEO,
> > > +        .get_video_buffer = get_video_buffer,
> > > +        .config_props     = &ff_vaapi_vpp_config_input,
> > > +        .needs_fifo       = 1,
> > > +    },
> > > +    {
> > > +        .name             = "overlay",
> > > +        .type             = AVMEDIA_TYPE_VIDEO,
> > > +        .get_video_buffer = get_video_buffer,
> > > +        .config_props     = &ff_vaapi_vpp_config_input,
> > > +        .needs_fifo       = 1,
> > > +    },
> > > +    { NULL }
> > > +};
> > > +
> > > +static const AVFilterPad overlay_vaapi_outputs[] = {
> > > +    {
> > > +        .name          = "default",
> > > +        .type          = AVMEDIA_TYPE_VIDEO,
> > > +        .config_props  = &overlay_vaapi_config_output,
> > > +    },
> > > +    { NULL }
> > > +};
> > > +
> > > +AVFilter ff_vf_overlay_vaapi = {
> > > +    .name            = "overlay_vaapi",
> > > +    .description     = NULL_IF_CONFIG_SMALL("Overlay one video on top of
> > another"),
> > > +    .priv_size       = sizeof(OverlayVAAPIContext),
> > > +    .priv_class      = &overlay_vaapi_class,
> > > +    .init            = &overlay_vaapi_init,
> > > +    .uninit          = &overlay_vaapi_uninit,
> > > +    .query_formats   = &overlay_vaapi_query_formats,
> > > +    .activate        = &overlay_vaapi_activate,
> > > +    .inputs          = overlay_vaapi_inputs,
> > > +    .outputs         = overlay_vaapi_outputs,
> > > +    .flags_internal  = FF_FILTER_FLAG_HWFRAME_AWARE, };
> > > --
Zachary Zhou June 20, 2019, 7:10 a.m. UTC | #5
> -----Original Message-----

> From: ffmpeg-devel [mailto:ffmpeg-devel-bounces@ffmpeg.org] On Behalf Of

> mypopy@gmail.com

> Sent: Wednesday, June 19, 2019 9:21 PM

> To: FFmpeg development discussions and patches <ffmpeg-devel@ffmpeg.org>

> Subject: Re: [FFmpeg-devel] [PATCH v2] avfilter/vaapi: add overlay_vaapi filter

> 

> On Wed, Jun 19, 2019 at 5:26 PM Zhou, Zachary <zachary.zhou@intel.com>

> wrote:

> >

> >

> >

> > > -----Original Message-----

> > > From: ffmpeg-devel [mailto:ffmpeg-devel-bounces@ffmpeg.org] On

> > > Behalf Of mypopy@gmail.com

> > > Sent: Wednesday, June 19, 2019 10:13 AM

> > > To: FFmpeg development discussions and patches

> > > <ffmpeg-devel@ffmpeg.org>

> > > Subject: Re: [FFmpeg-devel] [PATCH v2] avfilter/vaapi: add

> > > overlay_vaapi filter

> > >

> > >  ?

> > > On Tue, Jun 11, 2019 at 10:52 AM Zachary Zhou

> > > <zachary.zhou@intel.com>

> > > wrote:

> > > >

> > > > ---

> > > >  configure                      |   1 +

> > > >  libavfilter/Makefile           |   1 +

> > > >  libavfilter/allfilters.c       |   1 +

> > > >  libavfilter/vaapi_vpp.c        |  95 +++++++++

> > > >  libavfilter/vaapi_vpp.h        |   5 +

> > > >  libavfilter/vf_overlay_vaapi.c | 357

> > > > +++++++++++++++++++++++++++++++++

> > > >  6 files changed, 460 insertions(+)  create mode 100644

> > > > libavfilter/vf_overlay_vaapi.c

> > > >

> > > > diff --git a/configure b/configure index 32fc26356c..f469e6a3b1

> > > > 100755

> > > > --- a/configure

> > > > +++ b/configure

> > > > @@ -3478,6 +3478,7 @@ openclsrc_filter_deps="opencl"

> > > >  overlay_opencl_filter_deps="opencl"

> > > >  overlay_qsv_filter_deps="libmfx"

> > > >  overlay_qsv_filter_select="qsvvpp"

> > > > +overlay_vaapi_filter_deps="vaapi"

> > > >  owdenoise_filter_deps="gpl"

> > > >  pan_filter_deps="swresample"

> > > >  perspective_filter_deps="gpl"

> > > > diff --git a/libavfilter/Makefile b/libavfilter/Makefile index

> > > > 07ea8d7edc..5cbf1a7e41 100644

> > > > --- a/libavfilter/Makefile

> > > > +++ b/libavfilter/Makefile

> > > > @@ -311,6 +311,7 @@ OBJS-$(CONFIG_OVERLAY_FILTER)                +=

> > > vf_overlay.o framesync.o

> > > >  OBJS-$(CONFIG_OVERLAY_OPENCL_FILTER)         += vf_overlay_opencl.o

> > > opencl.o \

> > > >                                                  opencl/overlay.o framesync.o

> > > >  OBJS-$(CONFIG_OVERLAY_QSV_FILTER)            += vf_overlay_qsv.o

> > > framesync.o

> > > > +OBJS-$(CONFIG_OVERLAY_VAAPI_FILTER)          += vf_overlay_vaapi.o

> > > framesync.o

> > > >  OBJS-$(CONFIG_OWDENOISE_FILTER)              += vf_owdenoise.o

> > > >  OBJS-$(CONFIG_PAD_FILTER)                    += vf_pad.o

> > > >  OBJS-$(CONFIG_PALETTEGEN_FILTER)             += vf_palettegen.o

> > > > diff --git a/libavfilter/allfilters.c b/libavfilter/allfilters.c

> > > > index 9c846b1ddd..27ee1df78b 100644

> > > > --- a/libavfilter/allfilters.c

> > > > +++ b/libavfilter/allfilters.c

> > > > @@ -295,6 +295,7 @@ extern AVFilter ff_vf_oscilloscope;  extern

> > > > AVFilter ff_vf_overlay;  extern AVFilter ff_vf_overlay_opencl;

> > > > extern AVFilter ff_vf_overlay_qsv;

> > > > +extern AVFilter ff_vf_overlay_vaapi;

> > > >  extern AVFilter ff_vf_owdenoise;

> > > >  extern AVFilter ff_vf_pad;

> > > >  extern AVFilter ff_vf_palettegen; diff --git

> > > > a/libavfilter/vaapi_vpp.c b/libavfilter/vaapi_vpp.c index

> > > > b5b245c8af..a8caa5b532 100644

> > > > --- a/libavfilter/vaapi_vpp.c

> > > > +++ b/libavfilter/vaapi_vpp.c

> > > > @@ -663,6 +663,101 @@ fail:

> > > >      return err;

> > > >  }

> > > >

> > > > +int ff_vaapi_vpp_render_overlay(AVFilterContext *avctx,

> > > > +                                VAProcPipelineParameterBuffer *params,

> > > > +                                VAProcPipelineParameterBuffer *subpic_params,

> > > > +                                VASurfaceID output_surface) {

> > > > +    VABufferID params_id;

> > > > +    VABufferID subpic_params_id;

> > > > +    VAStatus vas;

> > > > +    int err = 0;

> > > > +    VAAPIVPPContext *ctx   = avctx->priv;

> > > > +

> > > > +    vas = vaBeginPicture(ctx->hwctx->display,

> > > > +                         ctx->va_context, output_surface);

> > > > +    if (vas != VA_STATUS_SUCCESS) {

> > > > +        av_log(avctx, AV_LOG_ERROR, "Failed to attach new picture: "

> > > > +               "%d (%s).\n", vas, vaErrorStr(vas));

> > > > +        err = AVERROR(EIO);

> > > > +        goto fail;

> > > > +    }

> > > > +

> > > > +    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,

> > > > +                         VAProcPipelineParameterBufferType,

> > > > +                         sizeof(*params), 1, params, &params_id);

> > > > +    if (vas != VA_STATUS_SUCCESS) {

> > > > +        av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "

> > > > +               "%d (%s).\n", vas, vaErrorStr(vas));

> > > > +        err = AVERROR(EIO);

> > > > +        goto fail_after_begin;

> > > > +    }

> > > > +    av_log(avctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n",

> > > > +           params_id);

> > > > +

> > > > +

> > > > +    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,

> > > > +                         VAProcPipelineParameterBufferType,

> > > > +                         sizeof(*subpic_params), 1,

> > > > + subpic_params,

> > > &subpic_params_id);

> > > > +    if (vas != VA_STATUS_SUCCESS) {

> > > > +        av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "

> > > > +               "%d (%s).\n", vas, vaErrorStr(vas));

> > > > +        err = AVERROR(EIO);

> > > > +        goto fail_after_begin;

> > > > +    }

> > > > +    av_log(avctx, AV_LOG_DEBUG, "Pipeline subpic parameter buffer

> > > is %#x.\n",

> > > > +           subpic_params_id);

> > > > +

> > > > +    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,

> > > > +                          &params_id, 1);

> > > > +    if (vas != VA_STATUS_SUCCESS) {

> > > > +        av_log(avctx, AV_LOG_ERROR, "Failed to render parameter buffer:

> "

> > > > +               "%d (%s).\n", vas, vaErrorStr(vas));

> > > > +        err = AVERROR(EIO);

> > > > +        goto fail_after_begin;

> > > > +    }

> > > > +

> > > > +    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,

> > > > +                          &subpic_params_id, 1);

> > > > +    if (vas != VA_STATUS_SUCCESS) {

> > > > +        av_log(avctx, AV_LOG_ERROR, "Failed to render subpic

> > > > + parameter

> > > buffer: "

> > > > +               "%d (%s).\n", vas, vaErrorStr(vas));

> > > > +        err = AVERROR(EIO);

> > > > +        goto fail_after_begin;

> > > > +    }

> > > > +

> > > > +    vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);

> > > > +    if (vas != VA_STATUS_SUCCESS) {

> > > > +        av_log(avctx, AV_LOG_ERROR, "Failed to start picture processing: "

> > > > +               "%d (%s).\n", vas, vaErrorStr(vas));

> > > > +        err = AVERROR(EIO);

> > > > +        goto fail_after_render;

> > > > +    }

> > > > +

> > > > +    if (CONFIG_VAAPI_1 || ctx->hwctx->driver_quirks &

> > > > +        AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS) {

> > > > +        vas = vaDestroyBuffer(ctx->hwctx->display, params_id);

> > > > +        if (vas != VA_STATUS_SUCCESS) {

> > > > +            av_log(avctx, AV_LOG_ERROR, "Failed to free parameter buffer: "

> > > > +                   "%d (%s).\n", vas, vaErrorStr(vas));

> > > > +            // And ignore.

> > > > +        }

> > > > +    }

> > > > +

> > > > +    return 0;

> > > > +

> > > > +    // We want to make sure that if vaBeginPicture has been called, we

> also

> > > > +    // call vaRenderPicture and vaEndPicture.  These calls may well fail or

> > > > +    // do something else nasty, but once we're in this failure case there

> > > > +    // isn't much else we can do.

> > > > +fail_after_begin:

> > > > +    vaRenderPicture(ctx->hwctx->display, ctx->va_context,

> > > > +&params_id, 1);

> > > > +fail_after_render:

> > > > +    vaEndPicture(ctx->hwctx->display, ctx->va_context);

> > > > +fail:

> > > > +    return err;

> > > > +}

> > > > +

> > > >  void ff_vaapi_vpp_ctx_init(AVFilterContext *avctx)  {

> > > >      int i;

> > > > diff --git a/libavfilter/vaapi_vpp.h b/libavfilter/vaapi_vpp.h

> > > > index

> > > > c3da91717c..8d53acdec9 100644

> > > > --- a/libavfilter/vaapi_vpp.h

> > > > +++ b/libavfilter/vaapi_vpp.h

> > > > @@ -83,4 +83,9 @@ int ff_vaapi_vpp_render_picture(AVFilterContext

> > > *avctx,

> > > >                                  VAProcPipelineParameterBuffer *params,

> > > >                                  AVFrame *output_frame);

> > > >

> > > > +int ff_vaapi_vpp_render_overlay(AVFilterContext *avctx,

> > > > +                                VAProcPipelineParameterBuffer *primary_params,

> > > > +                                VAProcPipelineParameterBuffer *subpic_params,

> > > > +                                VASurfaceID output_surface);

> > > > +

> > > >  #endif /* AVFILTER_VAAPI_VPP_H */ diff --git

> > > > a/libavfilter/vf_overlay_vaapi.c b/libavfilter/vf_overlay_vaapi.c

> > > > new file mode 100644 index

> > > > 0000000000..0f72b23611

> > > > --- /dev/null

> > > > +++ b/libavfilter/vf_overlay_vaapi.c

> > > > @@ -0,0 +1,357 @@

> > > > +/*

> > > > + * This file is part of FFmpeg.

> > > > + *

> > > > + * FFmpeg is free software; you can redistribute it and/or

> > > > + * modify it under the terms of the GNU Lesser General Public

> > > > + * License as published by the Free Software Foundation; either

> > > > + * version 2.1 of the License, or (at your option) any later version.

> > > > + *

> > > > + * FFmpeg is distributed in the hope that it will be useful,

> > > > + * but WITHOUT ANY WARRANTY; without even the implied warranty of

> > > > + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the

> > > GNU

> > > > + * Lesser General Public License for more details.

> > > > + *

> > > > + * You should have received a copy of the GNU Lesser General

> > > > +Public

> > > > + * License along with FFmpeg; if not, write to the Free Software

> > > > + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA

> > > > +02110-1301 USA  */ #include <string.h>

> > > > +

> > > > +#include "libavutil/avassert.h"

> > > > +#include "libavutil/mem.h"

> > > > +#include "libavutil/opt.h"

> > > > +#include "libavutil/pixdesc.h"

> > > > +

> > > > +#include "avfilter.h"

> > > > +#include "framesync.h"

> > > > +#include "formats.h"

> > > > +#include "internal.h"

> > > > +#include "vaapi_vpp.h"

> > > > +

> > > > +#if VA_CHECK_VERSION(2, 1, 0)

> > > > +// Blend State

> > > > +static VABlendState blend_state = { /** \brief Video blending flags. */

> > > > +    .flags        = VA_BLEND_PREMULTIPLIED_ALPHA,

> > > //VA_BLEND_GLOBAL_ALPHA,

> > > > +    .global_alpha = 1,

> > > > +    .min_luma     = 0,

> > > > +    .max_luma     = 1

> > > > +};

> > > > +#endif

> > > > +

> > > > +typedef struct OverlayVAAPIContext {

> > > > +    VAAPIVPPContext  vpp_ctx; // must be the first field

> > > > +

> > > > +    FFFrameSync      fs;

> > > > +

> > > > +    int              overlay_x;

> > > > +    int              overlay_y;

> > > > +    int              overlay_w;

> > > > +    int              overlay_h;

> > > > +    float            overlay_alpha;

> > > > +} OverlayVAAPIContext;

> > > > +

> > > > +static int overlay_vaapi_query_formats(AVFilterContext *ctx) {

> > > > +    int i;

> > > > +    int ret;

> > > > +

> > > > +    static const enum AVPixelFormat main_in_fmts[] = {

> > > > +        AV_PIX_FMT_NV12,

> > > > +        AV_PIX_FMT_RGB32,

> > > > +        AV_PIX_FMT_VAAPI,

> > > > +        AV_PIX_FMT_NONE

> > > > +    };

> > > > +    static const enum AVPixelFormat out_pix_fmts[] = {

> > > > +        AV_PIX_FMT_NV12,

> > > > +        AV_PIX_FMT_RGB32,

> > > > +        AV_PIX_FMT_VAAPI,

> > > > +        AV_PIX_FMT_NONE

> > > > +    };

> > > > +

> > > > +    for (i = 0; i < ctx->nb_inputs; i++) {

> > > > +        ret = ff_formats_ref(ff_make_format_list(main_in_fmts),

> > > > + &ctx-

> > > >inputs[i]->out_formats);

> > > > +        if (ret < 0)

> > > > +            return ret;

> > > > +    }

> > > > +

> > > > +    ret = ff_formats_ref(ff_make_format_list(out_pix_fmts), &ctx-

> > > >outputs[0]->in_formats);

> > > > +    if (ret < 0)

> > > > +        return ret;

> > > > +

> > > > +    return 0;

> > > > +}

> > > > +

> > > > +static int overlay_vaapi_blend(FFFrameSync *fs) {

> > > > +    AVFilterContext    *avctx = fs->parent;

> > > > +    AVFilterLink     *outlink = avctx->outputs[0];

> > > > +    OverlayVAAPIContext *ctx  = avctx->priv;

> > > > +    VAAPIVPPContext *vpp_ctx  = avctx->priv;

> > > > +    AVFrame *input_main, *input_overlay;

> > > > +    AVFrame *output;

> > > > +    int err;

> > > > +    VASurfaceID main_surface, overlay_surface, output_surface;

> > > > +    VARectangle main_region, overlay_region, output_region;

> > > > +    VAProcPipelineParameterBuffer params, subpic_params;

> > > > +

> > > > +    err = ff_framesync_get_frame(fs, 0, &input_main, 0);

> > > > +    if (err < 0)

> > > > +        return err;

> > > > +    err = ff_framesync_get_frame(fs, 1, &input_overlay, 0);

> > > > +    if (err < 0)

> > > > +        return err;

> > > > +

> > > > +    av_log(avctx, AV_LOG_DEBUG, "Filter main: %s, %ux%u

> (%"PRId64").\n",

> > > > +           av_get_pix_fmt_name(input_main->format),

> > > > +           input_main->width, input_main->height,

> > > > + input_main->pts);

> > > > +

> > > > +    av_log(avctx, AV_LOG_DEBUG, "Filter overlay: %s, %ux%u

> > > (%"PRId64").\n",

> > > > +           av_get_pix_fmt_name(input_overlay->format),

> > > > +           input_overlay->width, input_overlay->height,

> > > > + input_overlay->pts);

> > > > +

> > > > +    if (vpp_ctx->va_context == VA_INVALID_ID)

> > > > +        return AVERROR(EINVAL);

> > > > +

> > > > +    main_surface = (VASurfaceID)(uintptr_t)input_main->data[3];

> > > > +    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for overlay

> > > > + vpp

> > > main.\n",

> > > > +           main_surface);

> > > > +

> > > > +    overlay_surface = (VASurfaceID)(uintptr_t)input_overlay->data[3];

> > > > +    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for overlay

> > > > + vpp

> > > overlay.\n",

> > > > +           overlay_surface);

> > > > +

> > > > +    output = ff_get_video_buffer(outlink, outlink->w, outlink->h);

> > > > +    if (!output) {

> > > > +        err = AVERROR(ENOMEM);

> > > > +        goto fail;

> > > > +    }

> > > > +

> > > > +    output_surface = (VASurfaceID)(uintptr_t)output->data[3];

> > > > +    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for overlay

> > > > + vpp

> > > output.\n",

> > > > +           output_surface);

> > > > +

> > > > +    memset(&params, 0, sizeof(params));

> > > > +    memset(&subpic_params, 0, sizeof(subpic_params));

> > > > +

> > > > +    main_region = (VARectangle) {

> > > > +        .x      = 0,

> > > > +        .y      = 0,

> > > > +        .width  = input_main->width,

> > > > +        .height = input_main->height,

> > > > +    };

> > > > +

> > > > +    overlay_region = (VARectangle) {

> > > > +        .x      = ctx->overlay_x,

> > > > +        .y      = ctx->overlay_y,

> > > > +        .width  = input_overlay->width,

> > > > +        .height = input_overlay->height,

> > > > +    };

> > > > +

> > > > +    output_region = (VARectangle) {

> > > > +        .x      = 0,

> > > > +        .y      = 0,

> > > > +        .width  = output->width,

> > > > +        .height = output->height,

> > > > +    };

> > > > +

> > > Some question, if overlay_width > main_width && overlay_height >

> > > main_height, what's the output in this filter?

> >

> > Thank you for the comments.

> > in this case, if don't reset overlay_width and overlay_height via parameter

> (w and h), UMD will failed to do EndPicture.

> >

> > are you suggesting to add check to avoid this ?

> >

> I think both case need to support


Thank you, I got it, new version patch will be sent out soon.

> > >

> > > How about overlay_x > main_width && overlay_y > main_height?

> > only main surface will be output in this occasion.

> >

> > >

> > > > +    if (!ctx->overlay_w && !ctx->overlay_h) {

> > > > +        overlay_region.width = ctx->overlay_w;

> > > > +        overlay_region.height = ctx->overlay_h;

> > > > +    }

> > > > +

> > > > +    params.filters     = &vpp_ctx->filter_buffers[0];

> > > > +    params.num_filters = vpp_ctx->nb_filter_buffers;

> > > > +

> > > > +    params.surface = main_surface;

> > > > +    params.surface_region = &main_region;

> > > > +

> > > > +    switch (input_main->colorspace) {

> > > > +    case AVCOL_SPC_BT470BG:

> > > > +        params.surface_color_standard = VAProcColorStandardBT601;

> > > > +        break;

> > > > +    case AVCOL_SPC_BT709:

> > > > +        params.surface_color_standard = VAProcColorStandardBT709;

> > > > +        break;

> > > > +    default:

> > > > +        params.surface_color_standard = VAProcColorStandardNone;

> > > > +        break;

> > > > +    }

> > > > +

> > > > +    params.output_region = &output_region;

> > > > +    params.output_background_color =

> VAAPI_VPP_BACKGROUND_BLACK;

> > > > +    params.output_color_standard = params.surface_color_standard;

> > > > +

> > > > +    memcpy(&subpic_params, &params, sizeof(subpic_params));

> > > > +    subpic_params.pipeline_flags |= VA_PROC_PIPELINE_FAST;

> > > > +    subpic_params.filter_flags |= VA_FILTER_SCALING_FAST;

> > > > +

> > > > +#if VA_CHECK_VERSION(2, 1, 0)

> > > > +    blend_state.global_alpha = ctx->overlay_alpha;

> > > > +    subpic_params.blend_state = &blend_state; #endif

> > > > +

> > > > +    subpic_params.surface = overlay_surface;

> > > > +    subpic_params.output_region = &overlay_region;

> > > > +

> > > > +    err = ff_vaapi_vpp_render_overlay(avctx, &params,

> > > > + &subpic_params,

> > > output_surface);

> > > > +    if (err < 0)

> > > > +        goto fail;

> > > > +

> > > > +    err = av_frame_copy_props(output, input_main);

> > > > +    if (err < 0)

> > > > +        goto fail;

> > > > +

> > > > +    av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u

> > > (%"PRId64").\n",

> > > > +           av_get_pix_fmt_name(output->format),

> > > > +           output->width, output->height, output->pts);

> > > > +

> > > > +    return ff_filter_frame(outlink, output);

> > > > +

> > > > +fail:

> > > > +    av_frame_free(&output);

> > > > +    return err;

> > > > +}

> > > > +

> > > > +static int overlay_vaapi_init_framesync(AVFilterContext *avctx) {

> > > > +    OverlayVAAPIContext *ctx = avctx->priv;

> > > > +    int ret, i;

> > > > +

> > > > +    ctx->fs.on_event = overlay_vaapi_blend;

> > > > +    ctx->fs.opaque   = ctx;

> > > > +    ret = ff_framesync_init(&ctx->fs, avctx, avctx->nb_inputs);

> > > > +    if (ret < 0)

> > > > +        return ret;

> > > > +

> > > > +    for (i = 0; i < avctx->nb_inputs; i++) {

> > > > +        FFFrameSyncIn *in = &ctx->fs.in[i];

> > > > +        in->before    = EXT_STOP;

> > > > +        in->after     = EXT_INFINITY;

> > > > +        in->sync      = i ? 1 : 2;

> > > > +        in->time_base = avctx->inputs[i]->time_base;

> > > > +    }

> > > > +

> > > > +    return ff_framesync_configure(&ctx->fs); }

> > > > +

> > > > +static int overlay_vaapi_config_output(AVFilterLink *outlink) {

> > > > +    AVFilterContext  *avctx  = outlink->src;

> > > > +    OverlayVAAPIContext *ctx = avctx->priv;

> > > > +    VAAPIVPPContext *vpp_ctx = avctx->priv;

> > > > +    AVFilterLink        *in0 = avctx->inputs[0];

> > > > +    AVFilterLink        *in1 = avctx->inputs[1];

> > > > +    int err;

> > > > +

> > > > +    av_log(ctx, AV_LOG_DEBUG, "Output is of %s.\n",

> > > av_get_pix_fmt_name(outlink->format));

> > > > +    if ((in0->format == AV_PIX_FMT_VAAPI && in1->format !=

> > > AV_PIX_FMT_VAAPI) ||

> > > > +        (in0->format != AV_PIX_FMT_VAAPI && in1->format ==

> > > AV_PIX_FMT_VAAPI)) {

> > > > +        av_log(ctx, AV_LOG_ERROR, "Mixing hardware and software

> > > > + pixel

> > > formats is not supported.\n");

> > > > +        return AVERROR(EINVAL);

> > > > +    }

> > > > +

> > > > +    err = overlay_vaapi_init_framesync(avctx);

> > > > +    if (err < 0)

> > > > +        return err;

> > > > +

> > > > +    vpp_ctx->output_width  = avctx->inputs[0]->w;

> > > > +    vpp_ctx->output_height = avctx->inputs[0]->h;

> > > > +

> > > > +    err = ff_vaapi_vpp_config_output(outlink);

> > > > +    if (err < 0)

> > > > +        return err;

> > > > +

> > > > +    err = ff_framesync_init_dualinput(&ctx->fs, avctx);

> > > > +    if (err < 0)

> > > > +        return err;

> > > > +

> > > > +    return ff_framesync_configure(&ctx->fs); }

> > > > +

> > > > +static av_cold int overlay_vaapi_init(AVFilterContext *avctx) {

> > > > +    VAAPIVPPContext *vpp_ctx = avctx->priv;

> > > > +

> > > > +    ff_vaapi_vpp_ctx_init(avctx);

> > > > +    vpp_ctx->output_format = AV_PIX_FMT_NONE;

> > > > +

> > > > +    return 0;

> > > > +}

> > > > +

> > > > +static int overlay_vaapi_activate(AVFilterContext *avctx) {

> > > > +    OverlayVAAPIContext *ctx = avctx->priv;

> > > > +

> > > > +    return ff_framesync_activate(&ctx->fs); }

> > > > +

> > > > +static av_cold void overlay_vaapi_uninit(AVFilterContext *avctx) {

> > > > +    OverlayVAAPIContext *ctx = avctx->priv;

> > > > +

> > > > +    ff_framesync_uninit(&ctx->fs); }

> > > > +

> > > > +static AVFrame *get_video_buffer(AVFilterLink *inlink, int w, int

> > > > +h) {

> > > > +    return ff_default_get_video_buffer(inlink, w, h); }

> > > > +

> > > > +#define OFFSET(x) offsetof(OverlayVAAPIContext, x) #define FLAGS

> > > > +(AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM)

> static

> > > const

> > > > +AVOption overlay_vaapi_options[] = {

> > > > +    { "x", "Overlay x position",

> > > > +      OFFSET(overlay_x), AV_OPT_TYPE_INT, { .i64 = 0 }, 0,

> > > > +INT_MAX, .flags =

> > > FLAGS },

> > > > +    { "y", "Overlay y position",

> > > > +      OFFSET(overlay_y), AV_OPT_TYPE_INT, { .i64 = 0 }, 0,

> > > > + INT_MAX, .flags =

> > > FLAGS },

> > > > +    { "w", "Overlay width",

> > > > +      OFFSET(overlay_w), AV_OPT_TYPE_INT, { .i64 = 0 }, 0,

> > > > + INT_MAX, .flags =

> > > FLAGS },

> > > > +    { "h", "Overlay height",

> > > > +      OFFSET(overlay_h), AV_OPT_TYPE_INT, { .i64 = 0 }, 0,

> > > > + INT_MAX, .flags =

> > > FLAGS },

> > > > +    { "alpha", "Overlay global alpha",

> > > > +      OFFSET(overlay_alpha), AV_OPT_TYPE_FLOAT, { .dbl = 0.0},

> > > > + 0.0,

> > > 1.0, .flags = FLAGS},

> > > > +    { NULL },

> > > > +};

> > > > +

> > > > +AVFILTER_DEFINE_CLASS(overlay_vaapi);

> > > > +

> > > > +static const AVFilterPad overlay_vaapi_inputs[] = {

> > > > +    {

> > > > +        .name             = "main",

> > > > +        .type             = AVMEDIA_TYPE_VIDEO,

> > > > +        .get_video_buffer = get_video_buffer,

> > > > +        .config_props     = &ff_vaapi_vpp_config_input,

> > > > +        .needs_fifo       = 1,

> > > > +    },

> > > > +    {

> > > > +        .name             = "overlay",

> > > > +        .type             = AVMEDIA_TYPE_VIDEO,

> > > > +        .get_video_buffer = get_video_buffer,

> > > > +        .config_props     = &ff_vaapi_vpp_config_input,

> > > > +        .needs_fifo       = 1,

> > > > +    },

> > > > +    { NULL }

> > > > +};

> > > > +

> > > > +static const AVFilterPad overlay_vaapi_outputs[] = {

> > > > +    {

> > > > +        .name          = "default",

> > > > +        .type          = AVMEDIA_TYPE_VIDEO,

> > > > +        .config_props  = &overlay_vaapi_config_output,

> > > > +    },

> > > > +    { NULL }

> > > > +};

> > > > +

> > > > +AVFilter ff_vf_overlay_vaapi = {

> > > > +    .name            = "overlay_vaapi",

> > > > +    .description     = NULL_IF_CONFIG_SMALL("Overlay one video on top

> of

> > > another"),

> > > > +    .priv_size       = sizeof(OverlayVAAPIContext),

> > > > +    .priv_class      = &overlay_vaapi_class,

> > > > +    .init            = &overlay_vaapi_init,

> > > > +    .uninit          = &overlay_vaapi_uninit,

> > > > +    .query_formats   = &overlay_vaapi_query_formats,

> > > > +    .activate        = &overlay_vaapi_activate,

> > > > +    .inputs          = overlay_vaapi_inputs,

> > > > +    .outputs         = overlay_vaapi_outputs,

> > > > +    .flags_internal  = FF_FILTER_FLAG_HWFRAME_AWARE, };

> > > > --

> _______________________________________________

> ffmpeg-devel mailing list

> ffmpeg-devel@ffmpeg.org

> https://ffmpeg.org/mailman/listinfo/ffmpeg-devel

> 

> To unsubscribe, visit link above, or email ffmpeg-devel-request@ffmpeg.org

> with subject "unsubscribe".
diff mbox

Patch

diff --git a/configure b/configure
index 32fc26356c..f469e6a3b1 100755
--- a/configure
+++ b/configure
@@ -3478,6 +3478,7 @@  openclsrc_filter_deps="opencl"
 overlay_opencl_filter_deps="opencl"
 overlay_qsv_filter_deps="libmfx"
 overlay_qsv_filter_select="qsvvpp"
+overlay_vaapi_filter_deps="vaapi"
 owdenoise_filter_deps="gpl"
 pan_filter_deps="swresample"
 perspective_filter_deps="gpl"
diff --git a/libavfilter/Makefile b/libavfilter/Makefile
index 07ea8d7edc..5cbf1a7e41 100644
--- a/libavfilter/Makefile
+++ b/libavfilter/Makefile
@@ -311,6 +311,7 @@  OBJS-$(CONFIG_OVERLAY_FILTER)                += vf_overlay.o framesync.o
 OBJS-$(CONFIG_OVERLAY_OPENCL_FILTER)         += vf_overlay_opencl.o opencl.o \
                                                 opencl/overlay.o framesync.o
 OBJS-$(CONFIG_OVERLAY_QSV_FILTER)            += vf_overlay_qsv.o framesync.o
+OBJS-$(CONFIG_OVERLAY_VAAPI_FILTER)          += vf_overlay_vaapi.o framesync.o
 OBJS-$(CONFIG_OWDENOISE_FILTER)              += vf_owdenoise.o
 OBJS-$(CONFIG_PAD_FILTER)                    += vf_pad.o
 OBJS-$(CONFIG_PALETTEGEN_FILTER)             += vf_palettegen.o
diff --git a/libavfilter/allfilters.c b/libavfilter/allfilters.c
index 9c846b1ddd..27ee1df78b 100644
--- a/libavfilter/allfilters.c
+++ b/libavfilter/allfilters.c
@@ -295,6 +295,7 @@  extern AVFilter ff_vf_oscilloscope;
 extern AVFilter ff_vf_overlay;
 extern AVFilter ff_vf_overlay_opencl;
 extern AVFilter ff_vf_overlay_qsv;
+extern AVFilter ff_vf_overlay_vaapi;
 extern AVFilter ff_vf_owdenoise;
 extern AVFilter ff_vf_pad;
 extern AVFilter ff_vf_palettegen;
diff --git a/libavfilter/vaapi_vpp.c b/libavfilter/vaapi_vpp.c
index b5b245c8af..a8caa5b532 100644
--- a/libavfilter/vaapi_vpp.c
+++ b/libavfilter/vaapi_vpp.c
@@ -663,6 +663,101 @@  fail:
     return err;
 }
 
+int ff_vaapi_vpp_render_overlay(AVFilterContext *avctx,
+                                VAProcPipelineParameterBuffer *params,
+                                VAProcPipelineParameterBuffer *subpic_params,
+                                VASurfaceID output_surface)
+{
+    VABufferID params_id;
+    VABufferID subpic_params_id;
+    VAStatus vas;
+    int err = 0;
+    VAAPIVPPContext *ctx   = avctx->priv;
+
+    vas = vaBeginPicture(ctx->hwctx->display,
+                         ctx->va_context, output_surface);
+    if (vas != VA_STATUS_SUCCESS) {
+        av_log(avctx, AV_LOG_ERROR, "Failed to attach new picture: "
+               "%d (%s).\n", vas, vaErrorStr(vas));
+        err = AVERROR(EIO);
+        goto fail;
+    }
+
+    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
+                         VAProcPipelineParameterBufferType,
+                         sizeof(*params), 1, params, &params_id);
+    if (vas != VA_STATUS_SUCCESS) {
+        av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
+               "%d (%s).\n", vas, vaErrorStr(vas));
+        err = AVERROR(EIO);
+        goto fail_after_begin;
+    }
+    av_log(avctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n",
+           params_id);
+
+
+    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
+                         VAProcPipelineParameterBufferType,
+                         sizeof(*subpic_params), 1, subpic_params, &subpic_params_id);
+    if (vas != VA_STATUS_SUCCESS) {
+        av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
+               "%d (%s).\n", vas, vaErrorStr(vas));
+        err = AVERROR(EIO);
+        goto fail_after_begin;
+    }
+    av_log(avctx, AV_LOG_DEBUG, "Pipeline subpic parameter buffer is %#x.\n",
+           subpic_params_id);
+
+    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
+                          &params_id, 1);
+    if (vas != VA_STATUS_SUCCESS) {
+        av_log(avctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
+               "%d (%s).\n", vas, vaErrorStr(vas));
+        err = AVERROR(EIO);
+        goto fail_after_begin;
+    }
+
+    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
+                          &subpic_params_id, 1);
+    if (vas != VA_STATUS_SUCCESS) {
+        av_log(avctx, AV_LOG_ERROR, "Failed to render subpic parameter buffer: "
+               "%d (%s).\n", vas, vaErrorStr(vas));
+        err = AVERROR(EIO);
+        goto fail_after_begin;
+    }
+
+    vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
+    if (vas != VA_STATUS_SUCCESS) {
+        av_log(avctx, AV_LOG_ERROR, "Failed to start picture processing: "
+               "%d (%s).\n", vas, vaErrorStr(vas));
+        err = AVERROR(EIO);
+        goto fail_after_render;
+    }
+
+    if (CONFIG_VAAPI_1 || ctx->hwctx->driver_quirks &
+        AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS) {
+        vas = vaDestroyBuffer(ctx->hwctx->display, params_id);
+        if (vas != VA_STATUS_SUCCESS) {
+            av_log(avctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
+                   "%d (%s).\n", vas, vaErrorStr(vas));
+            // And ignore.
+        }
+    }
+
+    return 0;
+
+    // We want to make sure that if vaBeginPicture has been called, we also
+    // call vaRenderPicture and vaEndPicture.  These calls may well fail or
+    // do something else nasty, but once we're in this failure case there
+    // isn't much else we can do.
+fail_after_begin:
+    vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_id, 1);
+fail_after_render:
+    vaEndPicture(ctx->hwctx->display, ctx->va_context);
+fail:
+    return err;
+}
+
 void ff_vaapi_vpp_ctx_init(AVFilterContext *avctx)
 {
     int i;
diff --git a/libavfilter/vaapi_vpp.h b/libavfilter/vaapi_vpp.h
index c3da91717c..8d53acdec9 100644
--- a/libavfilter/vaapi_vpp.h
+++ b/libavfilter/vaapi_vpp.h
@@ -83,4 +83,9 @@  int ff_vaapi_vpp_render_picture(AVFilterContext *avctx,
                                 VAProcPipelineParameterBuffer *params,
                                 AVFrame *output_frame);
 
+int ff_vaapi_vpp_render_overlay(AVFilterContext *avctx,
+                                VAProcPipelineParameterBuffer *primary_params,
+                                VAProcPipelineParameterBuffer *subpic_params,
+                                VASurfaceID output_surface);
+
 #endif /* AVFILTER_VAAPI_VPP_H */
diff --git a/libavfilter/vf_overlay_vaapi.c b/libavfilter/vf_overlay_vaapi.c
new file mode 100644
index 0000000000..0f72b23611
--- /dev/null
+++ b/libavfilter/vf_overlay_vaapi.c
@@ -0,0 +1,357 @@ 
+/*
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+#include <string.h>
+
+#include "libavutil/avassert.h"
+#include "libavutil/mem.h"
+#include "libavutil/opt.h"
+#include "libavutil/pixdesc.h"
+
+#include "avfilter.h"
+#include "framesync.h"
+#include "formats.h"
+#include "internal.h"
+#include "vaapi_vpp.h"
+
+#if VA_CHECK_VERSION(2, 1, 0)
+// Blend State
+static VABlendState blend_state = { /** \brief Video blending flags. */
+    .flags        = VA_BLEND_PREMULTIPLIED_ALPHA, //VA_BLEND_GLOBAL_ALPHA,
+    .global_alpha = 1,
+    .min_luma     = 0,
+    .max_luma     = 1
+};
+#endif
+
+typedef struct OverlayVAAPIContext {
+    VAAPIVPPContext  vpp_ctx; // must be the first field
+
+    FFFrameSync      fs;
+
+    int              overlay_x;
+    int              overlay_y;
+    int              overlay_w;
+    int              overlay_h;
+    float            overlay_alpha;
+} OverlayVAAPIContext;
+
+static int overlay_vaapi_query_formats(AVFilterContext *ctx)
+{
+    int i;
+    int ret;
+
+    static const enum AVPixelFormat main_in_fmts[] = {
+        AV_PIX_FMT_NV12,
+        AV_PIX_FMT_RGB32,
+        AV_PIX_FMT_VAAPI,
+        AV_PIX_FMT_NONE
+    };
+    static const enum AVPixelFormat out_pix_fmts[] = {
+        AV_PIX_FMT_NV12,
+        AV_PIX_FMT_RGB32,
+        AV_PIX_FMT_VAAPI,
+        AV_PIX_FMT_NONE
+    };
+
+    for (i = 0; i < ctx->nb_inputs; i++) {
+        ret = ff_formats_ref(ff_make_format_list(main_in_fmts), &ctx->inputs[i]->out_formats);
+        if (ret < 0)
+            return ret;
+    }
+
+    ret = ff_formats_ref(ff_make_format_list(out_pix_fmts), &ctx->outputs[0]->in_formats);
+    if (ret < 0)
+        return ret;
+
+    return 0;
+}
+
+static int overlay_vaapi_blend(FFFrameSync *fs)
+{
+    AVFilterContext    *avctx = fs->parent;
+    AVFilterLink     *outlink = avctx->outputs[0];
+    OverlayVAAPIContext *ctx  = avctx->priv;
+    VAAPIVPPContext *vpp_ctx  = avctx->priv;
+    AVFrame *input_main, *input_overlay;
+    AVFrame *output;
+    int err;
+    VASurfaceID main_surface, overlay_surface, output_surface;
+    VARectangle main_region, overlay_region, output_region;
+    VAProcPipelineParameterBuffer params, subpic_params;
+
+    err = ff_framesync_get_frame(fs, 0, &input_main, 0);
+    if (err < 0)
+        return err;
+    err = ff_framesync_get_frame(fs, 1, &input_overlay, 0);
+    if (err < 0)
+        return err;
+
+    av_log(avctx, AV_LOG_DEBUG, "Filter main: %s, %ux%u (%"PRId64").\n",
+           av_get_pix_fmt_name(input_main->format),
+           input_main->width, input_main->height, input_main->pts);
+
+    av_log(avctx, AV_LOG_DEBUG, "Filter overlay: %s, %ux%u (%"PRId64").\n",
+           av_get_pix_fmt_name(input_overlay->format),
+           input_overlay->width, input_overlay->height, input_overlay->pts);
+
+    if (vpp_ctx->va_context == VA_INVALID_ID)
+        return AVERROR(EINVAL);
+
+    main_surface = (VASurfaceID)(uintptr_t)input_main->data[3];
+    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for overlay vpp main.\n",
+           main_surface);
+
+    overlay_surface = (VASurfaceID)(uintptr_t)input_overlay->data[3];
+    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for overlay vpp overlay.\n",
+           overlay_surface);
+
+    output = ff_get_video_buffer(outlink, outlink->w, outlink->h);
+    if (!output) {
+        err = AVERROR(ENOMEM);
+        goto fail;
+    }
+
+    output_surface = (VASurfaceID)(uintptr_t)output->data[3];
+    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for overlay vpp output.\n",
+           output_surface);
+
+    memset(&params, 0, sizeof(params));
+    memset(&subpic_params, 0, sizeof(subpic_params));
+
+    main_region = (VARectangle) {
+        .x      = 0,
+        .y      = 0,
+        .width  = input_main->width,
+        .height = input_main->height,
+    };
+
+    overlay_region = (VARectangle) {
+        .x      = ctx->overlay_x,
+        .y      = ctx->overlay_y,
+        .width  = input_overlay->width,
+        .height = input_overlay->height,
+    };
+
+    output_region = (VARectangle) {
+        .x      = 0,
+        .y      = 0,
+        .width  = output->width,
+        .height = output->height,
+    };
+
+    if (ctx->overlay_w && ctx->overlay_h) {
+        overlay_region.width  = ctx->overlay_w;
+        overlay_region.height = ctx->overlay_h;
+    }
+
+    params.filters     = &vpp_ctx->filter_buffers[0];
+    params.num_filters = vpp_ctx->nb_filter_buffers;
+
+    params.surface = main_surface;
+    params.surface_region = &main_region;
+
+    switch (input_main->colorspace) {
+    case AVCOL_SPC_BT470BG:
+        params.surface_color_standard = VAProcColorStandardBT601;
+        break;
+    case AVCOL_SPC_BT709:
+        params.surface_color_standard = VAProcColorStandardBT709;
+        break;
+    default:
+        params.surface_color_standard = VAProcColorStandardNone;
+        break;
+    }
+
+    params.output_region = &output_region;
+    params.output_background_color = VAAPI_VPP_BACKGROUND_BLACK;
+    params.output_color_standard = params.surface_color_standard;
+
+    memcpy(&subpic_params, &params, sizeof(subpic_params));
+    subpic_params.pipeline_flags |= VA_PROC_PIPELINE_FAST;
+    subpic_params.filter_flags |= VA_FILTER_SCALING_FAST;
+
+#if VA_CHECK_VERSION(2, 1, 0)
+    blend_state.global_alpha = ctx->overlay_alpha; /* NOTE(review): has no effect while .flags lacks VA_BLEND_GLOBAL_ALPHA — confirm intent */
+    subpic_params.blend_state = &blend_state;
+#endif
+
+    subpic_params.surface = overlay_surface;
+    subpic_params.output_region = &overlay_region;
+
+    err = ff_vaapi_vpp_render_overlay(avctx, &params, &subpic_params, output_surface);
+    if (err < 0)
+        goto fail;
+
+    err = av_frame_copy_props(output, input_main);
+    if (err < 0)
+        goto fail;
+
+    av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
+           av_get_pix_fmt_name(output->format),
+           output->width, output->height, output->pts);
+
+    return ff_filter_frame(outlink, output);
+
+fail:
+    av_frame_free(&output);
+    return err;
+}
+
+static int overlay_vaapi_init_framesync(AVFilterContext *avctx)
+{
+    OverlayVAAPIContext *ctx = avctx->priv;
+    int ret, i;
+
+    ctx->fs.on_event = overlay_vaapi_blend;
+    ctx->fs.opaque   = ctx;
+    ret = ff_framesync_init(&ctx->fs, avctx, avctx->nb_inputs);
+    if (ret < 0)
+        return ret;
+
+    for (i = 0; i < avctx->nb_inputs; i++) {
+        FFFrameSyncIn *in = &ctx->fs.in[i];
+        in->before    = EXT_STOP;
+        in->after     = EXT_INFINITY;
+        in->sync      = i ? 1 : 2;
+        in->time_base = avctx->inputs[i]->time_base;
+    }
+
+    return ff_framesync_configure(&ctx->fs);
+}
+
+static int overlay_vaapi_config_output(AVFilterLink *outlink)
+{
+    AVFilterContext  *avctx  = outlink->src;
+    OverlayVAAPIContext *ctx = avctx->priv;
+    VAAPIVPPContext *vpp_ctx = avctx->priv;
+    AVFilterLink        *in0 = avctx->inputs[0];
+    AVFilterLink        *in1 = avctx->inputs[1];
+    int err;
+
+    av_log(ctx, AV_LOG_DEBUG, "Output is of %s.\n", av_get_pix_fmt_name(outlink->format));
+    if ((in0->format == AV_PIX_FMT_VAAPI && in1->format != AV_PIX_FMT_VAAPI) ||
+        (in0->format != AV_PIX_FMT_VAAPI && in1->format == AV_PIX_FMT_VAAPI)) {
+        av_log(ctx, AV_LOG_ERROR, "Mixing hardware and software pixel formats is not supported.\n");
+        return AVERROR(EINVAL);
+    }
+
+    err = overlay_vaapi_init_framesync(avctx);
+    if (err < 0)
+        return err;
+
+    vpp_ctx->output_width  = avctx->inputs[0]->w;
+    vpp_ctx->output_height = avctx->inputs[0]->h;
+
+    err = ff_vaapi_vpp_config_output(outlink);
+    if (err < 0)
+        return err;
+
+    /* The framesync for both inputs was already fully initialised and
+     * configured by overlay_vaapi_init_framesync() above; running
+     * ff_framesync_init_dualinput() + ff_framesync_configure() again
+     * would clobber and leak that state. */
+    return 0;
+}
+
+static av_cold int overlay_vaapi_init(AVFilterContext *avctx)
+{
+    VAAPIVPPContext *vpp_ctx = avctx->priv;
+
+    ff_vaapi_vpp_ctx_init(avctx);
+    vpp_ctx->output_format = AV_PIX_FMT_NONE;
+
+    return 0;
+}
+
+static int overlay_vaapi_activate(AVFilterContext *avctx)
+{
+    OverlayVAAPIContext *ctx = avctx->priv;
+
+    return ff_framesync_activate(&ctx->fs);
+}
+
+static av_cold void overlay_vaapi_uninit(AVFilterContext *avctx)
+{
+    OverlayVAAPIContext *ctx = avctx->priv;
+
+    ff_vaapi_vpp_ctx_uninit(avctx); /* release VA context/filter buffers set up by ff_vaapi_vpp_ctx_init() */
+    ff_framesync_uninit(&ctx->fs);
+}
+
+static AVFrame *get_video_buffer(AVFilterLink *inlink, int w, int h)
+{
+    return ff_default_get_video_buffer(inlink, w, h);
+}
+
+#define OFFSET(x) offsetof(OverlayVAAPIContext, x)
+#define FLAGS (AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM)
+static const AVOption overlay_vaapi_options[] = {
+    { "x", "Overlay x position",
+      OFFSET(overlay_x), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags = FLAGS },
+    { "y", "Overlay y position",
+      OFFSET(overlay_y), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags = FLAGS },
+    { "w", "Overlay width",
+      OFFSET(overlay_w), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags = FLAGS },
+    { "h", "Overlay height",
+      OFFSET(overlay_h), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags = FLAGS },
+    { "alpha", "Overlay global alpha",
+      OFFSET(overlay_alpha), AV_OPT_TYPE_FLOAT, { .dbl = 0.0}, 0.0, 1.0, .flags = FLAGS},
+    { NULL },
+};
+
+AVFILTER_DEFINE_CLASS(overlay_vaapi);
+
+static const AVFilterPad overlay_vaapi_inputs[] = {
+    {
+        .name             = "main",
+        .type             = AVMEDIA_TYPE_VIDEO,
+        .get_video_buffer = get_video_buffer,
+        .config_props     = &ff_vaapi_vpp_config_input,
+        .needs_fifo       = 1,
+    },
+    {
+        .name             = "overlay",
+        .type             = AVMEDIA_TYPE_VIDEO,
+        .get_video_buffer = get_video_buffer,
+        .config_props     = &ff_vaapi_vpp_config_input,
+        .needs_fifo       = 1,
+    },
+    { NULL }
+};
+
+static const AVFilterPad overlay_vaapi_outputs[] = {
+    {
+        .name          = "default",
+        .type          = AVMEDIA_TYPE_VIDEO,
+        .config_props  = &overlay_vaapi_config_output,
+    },
+    { NULL }
+};
+
+AVFilter ff_vf_overlay_vaapi = {
+    .name            = "overlay_vaapi",
+    .description     = NULL_IF_CONFIG_SMALL("Overlay one video on top of another"),
+    .priv_size       = sizeof(OverlayVAAPIContext),
+    .priv_class      = &overlay_vaapi_class,
+    .init            = &overlay_vaapi_init,
+    .uninit          = &overlay_vaapi_uninit,
+    .query_formats   = &overlay_vaapi_query_formats,
+    .activate        = &overlay_vaapi_activate,
+    .inputs          = overlay_vaapi_inputs,
+    .outputs         = overlay_vaapi_outputs,
+    .flags_internal  = FF_FILTER_FLAG_HWFRAME_AWARE,
+};