diff mbox series

[FFmpeg-devel,v4,03/11] libavutil/hwcontext_d3d11va: adding more texture information to the D3D11 hwcontext API

Message ID 20200508151821.49051-3-artem.galin@gmail.com
State New
Headers show
Series [FFmpeg-devel,v4,01/11] fftools/qsv: enabling d3d11va/dxva2 device selection
Related show

Checks

Context Check Description
andriy/default pending
andriy/make success Make finished
andriy/make_fate success Make fate finished

Commit Message

Artem Galin May 8, 2020, 3:18 p.m. UTC
From: Artem Galin <artem.galin@intel.com>

Added an AVD3D11FrameDescriptors array to store an array of single textures in case there is no way
to allocate an array texture with BindFlags = D3D11_BIND_RENDER_TARGET.

Signed-off-by: Artem Galin <artem.galin@intel.com>
---
 libavutil/hwcontext_d3d11va.c | 26 ++++++++++++++++++++------
 libavutil/hwcontext_d3d11va.h |  9 +++++++++
 2 files changed, 29 insertions(+), 6 deletions(-)

Comments

Hendrik Leppkes May 8, 2020, 8:26 p.m. UTC | #1
On Fri, May 8, 2020 at 5:51 PM <artem.galin@gmail.com> wrote:
>
> From: Artem Galin <artem.galin@intel.com>
>
> Added AVD3D11FrameDescriptors array to store array of single textures in case if there is no way
> to allocate array texture with BindFlags = D3D11_BIND_RENDER_TARGET.
>
> Signed-off-by: Artem Galin <artem.galin@intel.com>
> ---
>  libavutil/hwcontext_d3d11va.c | 26 ++++++++++++++++++++------
>  libavutil/hwcontext_d3d11va.h |  9 +++++++++
>  2 files changed, 29 insertions(+), 6 deletions(-)
>
> diff --git a/libavutil/hwcontext_d3d11va.c b/libavutil/hwcontext_d3d11va.c
> index c8ae58f908..cd80931dd3 100644
> --- a/libavutil/hwcontext_d3d11va.c
> +++ b/libavutil/hwcontext_d3d11va.c
> @@ -72,8 +72,8 @@ static av_cold void load_functions(void)
>  }
>
>  typedef struct D3D11VAFramesContext {
> -    int nb_surfaces_used;
> -
> +    size_t nb_surfaces;
> +    size_t nb_surfaces_used;
>      DXGI_FORMAT format;
>
>      ID3D11Texture2D *staging_texture;
> @@ -112,6 +112,8 @@ static void d3d11va_frames_uninit(AVHWFramesContext *ctx)
>      if (s->staging_texture)
>          ID3D11Texture2D_Release(s->staging_texture);
>      s->staging_texture = NULL;
> +
> +    av_freep(&frames_hwctx->texture_infos);
>  }
>
>  static int d3d11va_frames_get_constraints(AVHWDeviceContext *ctx,
> @@ -152,8 +154,10 @@ static void free_texture(void *opaque, uint8_t *data)
>      av_free(data);
>  }
>
> -static AVBufferRef *wrap_texture_buf(ID3D11Texture2D *tex, int index)
> +static AVBufferRef *wrap_texture_buf(AVHWFramesContext *ctx, ID3D11Texture2D *tex, int index)
>  {
> +    D3D11VAFramesContext              *s = ctx->internal->priv;
> +    AVD3D11VAFramesContext *frames_hwctx = ctx->hwctx;
>      AVBufferRef *buf;
>      AVD3D11FrameDescriptor *desc = av_mallocz(sizeof(*desc));
>      if (!desc) {
> @@ -161,6 +165,10 @@ static AVBufferRef *wrap_texture_buf(ID3D11Texture2D *tex, int index)
>          return NULL;
>      }
>
> +    frames_hwctx->texture_infos[s->nb_surfaces_used].texture = tex;
> +    frames_hwctx->texture_infos[s->nb_surfaces_used].index = index;
> +    s->nb_surfaces_used++;
> +
>      desc->texture = tex;
>      desc->index   = index;
>
> @@ -199,7 +207,7 @@ static AVBufferRef *d3d11va_alloc_single(AVHWFramesContext *ctx)
>          return NULL;
>      }
>
> -    return wrap_texture_buf(tex, 0);
> +    return wrap_texture_buf(ctx, tex, 0);
>  }
>
>  static AVBufferRef *d3d11va_pool_alloc(void *opaque, int size)
> @@ -220,7 +228,7 @@ static AVBufferRef *d3d11va_pool_alloc(void *opaque, int size)
>      }
>
>      ID3D11Texture2D_AddRef(hwctx->texture);
> -    return wrap_texture_buf(hwctx->texture, s->nb_surfaces_used++);
> +    return wrap_texture_buf(ctx, hwctx->texture, s->nb_surfaces_used);
>  }
>
>  static int d3d11va_frames_init(AVHWFramesContext *ctx)
> @@ -267,7 +275,7 @@ static int d3d11va_frames_init(AVHWFramesContext *ctx)
>              av_log(ctx, AV_LOG_ERROR, "User-provided texture has mismatching parameters\n");
>              return AVERROR(EINVAL);
>          }
> -    } else if (texDesc.ArraySize > 0) {
> +    } else if (!(texDesc.BindFlags & D3D11_BIND_RENDER_TARGET) && texDesc.ArraySize > 0) {
>          hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &hwctx->texture);
>          if (FAILED(hr)) {
>              av_log(ctx, AV_LOG_ERROR, "Could not create the texture (%lx)\n", (long)hr);
> @@ -275,6 +283,12 @@ static int d3d11va_frames_init(AVHWFramesContext *ctx)
>          }
>      }
>
> +    hwctx->texture_infos = av_mallocz_array(ctx->initial_pool_size, sizeof(*hwctx->texture_infos));
> +    if (!hwctx->texture_infos)
> +        return AVERROR(ENOMEM);
> +
> +    s->nb_surfaces = ctx->initial_pool_size;
> +
>      ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(AVD3D11FrameDescriptor),
>                                                          ctx, d3d11va_pool_alloc, NULL);
>      if (!ctx->internal->pool_internal)
> diff --git a/libavutil/hwcontext_d3d11va.h b/libavutil/hwcontext_d3d11va.h
> index 9f91e9b1b6..295bdcd90d 100644
> --- a/libavutil/hwcontext_d3d11va.h
> +++ b/libavutil/hwcontext_d3d11va.h
> @@ -164,6 +164,15 @@ typedef struct AVD3D11VAFramesContext {
>       * This field is ignored/invalid if a user-allocated texture is provided.
>       */
>      UINT MiscFlags;
> +
> +    /**
> +     * In case if texture structure member above is not NULL contains the same texture
> +     * pointer for all elements and different indexes into the array texture.
> +     * In case if texture structure member above is NULL, all elements contains
> +     * pointers to separate non-array textures and 0 indexes.
> +     * This field is ignored/invalid if a user-allocated texture is provided.
> +     */
> +    AVD3D11FrameDescriptor *texture_infos;
>  } AVD3D11VAFramesContext;
>


I'm not really a fan of this. Only supporting array textures was an
intentional design decision back when D3D11VA was defined, because it
greatly simplified the entire design - and as far as I know the
d3d11va decoder, for example, doesn't even support decoding into
anything else.

- Hendrik
Soft Works May 9, 2020, 12:11 a.m. UTC | #2
> From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> Hendrik Leppkes
> Sent: Friday, May 8, 2020 10:27 PM
> To: FFmpeg development discussions and patches <ffmpeg-
> devel@ffmpeg.org>
> Subject: Re: [FFmpeg-devel] [PATCH v4 03/11] libavutil/hwcontext_d3d11va:
> adding more texture information to the D3D11 hwcontext API
> 
> On Fri, May 8, 2020 at 5:51 PM <artem.galin@gmail.com> wrote:
> >
> > From: Artem Galin <artem.galin@intel.com>
> >
> > Added AVD3D11FrameDescriptors array to store array of single textures
> > in case if there is no way to allocate array texture with BindFlags =
> D3D11_BIND_RENDER_TARGET.
> >
> > Signed-off-by: Artem Galin <artem.galin@intel.com>
> > ---
> >  libavutil/hwcontext_d3d11va.c | 26 ++++++++++++++++++++------
> > libavutil/hwcontext_d3d11va.h |  9 +++++++++
> >  2 files changed, 29 insertions(+), 6 deletions(-)
> >
> > diff --git a/libavutil/hwcontext_d3d11va.c
> > b/libavutil/hwcontext_d3d11va.c index c8ae58f908..cd80931dd3 100644
> > --- a/libavutil/hwcontext_d3d11va.c
> > +++ b/libavutil/hwcontext_d3d11va.c
> > @@ -72,8 +72,8 @@ static av_cold void load_functions(void)  }
> >
> >  typedef struct D3D11VAFramesContext {
> > -    int nb_surfaces_used;
> > -
> > +    size_t nb_surfaces;
> > +    size_t nb_surfaces_used;
> >      DXGI_FORMAT format;
> >
> >      ID3D11Texture2D *staging_texture; @@ -112,6 +112,8 @@ static void
> > d3d11va_frames_uninit(AVHWFramesContext *ctx)
> >      if (s->staging_texture)
> >          ID3D11Texture2D_Release(s->staging_texture);
> >      s->staging_texture = NULL;
> > +
> > +    av_freep(&frames_hwctx->texture_infos);
> >  }
> >
> >  static int d3d11va_frames_get_constraints(AVHWDeviceContext *ctx, @@
> > -152,8 +154,10 @@ static void free_texture(void *opaque, uint8_t *data)
> >      av_free(data);
> >  }
> >
> > -static AVBufferRef *wrap_texture_buf(ID3D11Texture2D *tex, int index)
> > +static AVBufferRef *wrap_texture_buf(AVHWFramesContext *ctx,
> > +ID3D11Texture2D *tex, int index)
> >  {
> > +    D3D11VAFramesContext              *s = ctx->internal->priv;
> > +    AVD3D11VAFramesContext *frames_hwctx = ctx->hwctx;
> >      AVBufferRef *buf;
> >      AVD3D11FrameDescriptor *desc = av_mallocz(sizeof(*desc));
> >      if (!desc) {
> > @@ -161,6 +165,10 @@ static AVBufferRef
> *wrap_texture_buf(ID3D11Texture2D *tex, int index)
> >          return NULL;
> >      }
> >
> > +    frames_hwctx->texture_infos[s->nb_surfaces_used].texture = tex;
> > +    frames_hwctx->texture_infos[s->nb_surfaces_used].index = index;
> > +    s->nb_surfaces_used++;
> > +
> >      desc->texture = tex;
> >      desc->index   = index;
> >
> > @@ -199,7 +207,7 @@ static AVBufferRef
> *d3d11va_alloc_single(AVHWFramesContext *ctx)
> >          return NULL;
> >      }
> >
> > -    return wrap_texture_buf(tex, 0);
> > +    return wrap_texture_buf(ctx, tex, 0);
> >  }
> >
> >  static AVBufferRef *d3d11va_pool_alloc(void *opaque, int size) @@
> > -220,7 +228,7 @@ static AVBufferRef *d3d11va_pool_alloc(void *opaque,
> int size)
> >      }
> >
> >      ID3D11Texture2D_AddRef(hwctx->texture);
> > -    return wrap_texture_buf(hwctx->texture, s->nb_surfaces_used++);
> > +    return wrap_texture_buf(ctx, hwctx->texture,
> > + s->nb_surfaces_used);
> >  }
> >
> >  static int d3d11va_frames_init(AVHWFramesContext *ctx) @@ -267,7
> > +275,7 @@ static int d3d11va_frames_init(AVHWFramesContext *ctx)
> >              av_log(ctx, AV_LOG_ERROR, "User-provided texture has
> mismatching parameters\n");
> >              return AVERROR(EINVAL);
> >          }
> > -    } else if (texDesc.ArraySize > 0) {
> > +    } else if (!(texDesc.BindFlags & D3D11_BIND_RENDER_TARGET) &&
> > + texDesc.ArraySize > 0) {
> >          hr = ID3D11Device_CreateTexture2D(device_hwctx->device,
> &texDesc, NULL, &hwctx->texture);
> >          if (FAILED(hr)) {
> >              av_log(ctx, AV_LOG_ERROR, "Could not create the texture
> > (%lx)\n", (long)hr); @@ -275,6 +283,12 @@ static int
> d3d11va_frames_init(AVHWFramesContext *ctx)
> >          }
> >      }
> >
> > +    hwctx->texture_infos = av_mallocz_array(ctx->initial_pool_size,
> sizeof(*hwctx->texture_infos));
> > +    if (!hwctx->texture_infos)
> > +        return AVERROR(ENOMEM);
> > +
> > +    s->nb_surfaces = ctx->initial_pool_size;
> > +
> >      ctx->internal->pool_internal =
> av_buffer_pool_init2(sizeof(AVD3D11FrameDescriptor),
> >                                                          ctx, d3d11va_pool_alloc, NULL);
> >      if (!ctx->internal->pool_internal) diff --git
> > a/libavutil/hwcontext_d3d11va.h b/libavutil/hwcontext_d3d11va.h index
> > 9f91e9b1b6..295bdcd90d 100644
> > --- a/libavutil/hwcontext_d3d11va.h
> > +++ b/libavutil/hwcontext_d3d11va.h
> > @@ -164,6 +164,15 @@ typedef struct AVD3D11VAFramesContext {
> >       * This field is ignored/invalid if a user-allocated texture is provided.
> >       */
> >      UINT MiscFlags;
> > +
> > +    /**
> > +     * In case if texture structure member above is not NULL contains the
> same texture
> > +     * pointer for all elements and different indexes into the array texture.
> > +     * In case if texture structure member above is NULL, all elements
> contains
> > +     * pointers to separate non-array textures and 0 indexes.
> > +     * This field is ignored/invalid if a user-allocated texture is provided.
> > +     */
> > +    AVD3D11FrameDescriptor *texture_infos;
> >  } AVD3D11VAFramesContext;
> >
> 
> 
> I'm not really a fan of this. Only supporting array textures was an intentional
> design decision back when D3D11VA was defined, because it greatly
> simplified the entire design - and as far as I know the d3d11va decoder, for
> example, doesnt even support decoding into anything else.
> 
> - Hendrik

It's not like there would be a choice. The Intel MSDK uses an allocator mechanism,
and when it asks for a non-array DX11 texture it has to be given one.

Only the very latest drivers may not have this requirement anymore, but only
14 months ago it was impossible to get it working otherwise - using the latest
driver/MSDK version at that time. Actually, it works with a mix of array (decoders)
and non-array textures (filters/VPP), but not exclusively with array textures.

softworkz
Hendrik Leppkes May 9, 2020, 7:07 a.m. UTC | #3
On Sat, May 9, 2020 at 2:12 AM Soft Works <softworkz@hotmail.com> wrote:
>
> > From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> > Hendrik Leppkes
> > Sent: Friday, May 8, 2020 10:27 PM
> > To: FFmpeg development discussions and patches <ffmpeg-
> > devel@ffmpeg.org>
> > Subject: Re: [FFmpeg-devel] [PATCH v4 03/11] libavutil/hwcontext_d3d11va:
> > adding more texture information to the D3D11 hwcontext API
> >
> > On Fri, May 8, 2020 at 5:51 PM <artem.galin@gmail.com> wrote:
> > >
> > > From: Artem Galin <artem.galin@intel.com>
> > >
> > > Added AVD3D11FrameDescriptors array to store array of single textures
> > > in case if there is no way to allocate array texture with BindFlags =
> > D3D11_BIND_RENDER_TARGET.
> > >
> > > Signed-off-by: Artem Galin <artem.galin@intel.com>
> > > ---
> > >  libavutil/hwcontext_d3d11va.c | 26 ++++++++++++++++++++------
> > > libavutil/hwcontext_d3d11va.h |  9 +++++++++
> > >  2 files changed, 29 insertions(+), 6 deletions(-)
> > >
> > > diff --git a/libavutil/hwcontext_d3d11va.c
> > > b/libavutil/hwcontext_d3d11va.c index c8ae58f908..cd80931dd3 100644
> > > --- a/libavutil/hwcontext_d3d11va.c
> > > +++ b/libavutil/hwcontext_d3d11va.c
> > > @@ -72,8 +72,8 @@ static av_cold void load_functions(void)  }
> > >
> > >  typedef struct D3D11VAFramesContext {
> > > -    int nb_surfaces_used;
> > > -
> > > +    size_t nb_surfaces;
> > > +    size_t nb_surfaces_used;
> > >      DXGI_FORMAT format;
> > >
> > >      ID3D11Texture2D *staging_texture; @@ -112,6 +112,8 @@ static void
> > > d3d11va_frames_uninit(AVHWFramesContext *ctx)
> > >      if (s->staging_texture)
> > >          ID3D11Texture2D_Release(s->staging_texture);
> > >      s->staging_texture = NULL;
> > > +
> > > +    av_freep(&frames_hwctx->texture_infos);
> > >  }
> > >
> > >  static int d3d11va_frames_get_constraints(AVHWDeviceContext *ctx, @@
> > > -152,8 +154,10 @@ static void free_texture(void *opaque, uint8_t *data)
> > >      av_free(data);
> > >  }
> > >
> > > -static AVBufferRef *wrap_texture_buf(ID3D11Texture2D *tex, int index)
> > > +static AVBufferRef *wrap_texture_buf(AVHWFramesContext *ctx,
> > > +ID3D11Texture2D *tex, int index)
> > >  {
> > > +    D3D11VAFramesContext              *s = ctx->internal->priv;
> > > +    AVD3D11VAFramesContext *frames_hwctx = ctx->hwctx;
> > >      AVBufferRef *buf;
> > >      AVD3D11FrameDescriptor *desc = av_mallocz(sizeof(*desc));
> > >      if (!desc) {
> > > @@ -161,6 +165,10 @@ static AVBufferRef
> > *wrap_texture_buf(ID3D11Texture2D *tex, int index)
> > >          return NULL;
> > >      }
> > >
> > > +    frames_hwctx->texture_infos[s->nb_surfaces_used].texture = tex;
> > > +    frames_hwctx->texture_infos[s->nb_surfaces_used].index = index;
> > > +    s->nb_surfaces_used++;
> > > +
> > >      desc->texture = tex;
> > >      desc->index   = index;
> > >
> > > @@ -199,7 +207,7 @@ static AVBufferRef
> > *d3d11va_alloc_single(AVHWFramesContext *ctx)
> > >          return NULL;
> > >      }
> > >
> > > -    return wrap_texture_buf(tex, 0);
> > > +    return wrap_texture_buf(ctx, tex, 0);
> > >  }
> > >
> > >  static AVBufferRef *d3d11va_pool_alloc(void *opaque, int size) @@
> > > -220,7 +228,7 @@ static AVBufferRef *d3d11va_pool_alloc(void *opaque,
> > int size)
> > >      }
> > >
> > >      ID3D11Texture2D_AddRef(hwctx->texture);
> > > -    return wrap_texture_buf(hwctx->texture, s->nb_surfaces_used++);
> > > +    return wrap_texture_buf(ctx, hwctx->texture,
> > > + s->nb_surfaces_used);
> > >  }
> > >
> > >  static int d3d11va_frames_init(AVHWFramesContext *ctx) @@ -267,7
> > > +275,7 @@ static int d3d11va_frames_init(AVHWFramesContext *ctx)
> > >              av_log(ctx, AV_LOG_ERROR, "User-provided texture has
> > mismatching parameters\n");
> > >              return AVERROR(EINVAL);
> > >          }
> > > -    } else if (texDesc.ArraySize > 0) {
> > > +    } else if (!(texDesc.BindFlags & D3D11_BIND_RENDER_TARGET) &&
> > > + texDesc.ArraySize > 0) {
> > >          hr = ID3D11Device_CreateTexture2D(device_hwctx->device,
> > &texDesc, NULL, &hwctx->texture);
> > >          if (FAILED(hr)) {
> > >              av_log(ctx, AV_LOG_ERROR, "Could not create the texture
> > > (%lx)\n", (long)hr); @@ -275,6 +283,12 @@ static int
> > d3d11va_frames_init(AVHWFramesContext *ctx)
> > >          }
> > >      }
> > >
> > > +    hwctx->texture_infos = av_mallocz_array(ctx->initial_pool_size,
> > sizeof(*hwctx->texture_infos));
> > > +    if (!hwctx->texture_infos)
> > > +        return AVERROR(ENOMEM);
> > > +
> > > +    s->nb_surfaces = ctx->initial_pool_size;
> > > +
> > >      ctx->internal->pool_internal =
> > av_buffer_pool_init2(sizeof(AVD3D11FrameDescriptor),
> > >                                                          ctx, d3d11va_pool_alloc, NULL);
> > >      if (!ctx->internal->pool_internal) diff --git
> > > a/libavutil/hwcontext_d3d11va.h b/libavutil/hwcontext_d3d11va.h index
> > > 9f91e9b1b6..295bdcd90d 100644
> > > --- a/libavutil/hwcontext_d3d11va.h
> > > +++ b/libavutil/hwcontext_d3d11va.h
> > > @@ -164,6 +164,15 @@ typedef struct AVD3D11VAFramesContext {
> > >       * This field is ignored/invalid if a user-allocated texture is provided.
> > >       */
> > >      UINT MiscFlags;
> > > +
> > > +    /**
> > > +     * In case if texture structure member above is not NULL contains the
> > same texture
> > > +     * pointer for all elements and different indexes into the array texture.
> > > +     * In case if texture structure member above is NULL, all elements
> > contains
> > > +     * pointers to separate non-array textures and 0 indexes.
> > > +     * This field is ignored/invalid if a user-allocated texture is provided.
> > > +     */
> > > +    AVD3D11FrameDescriptor *texture_infos;
> > >  } AVD3D11VAFramesContext;
> > >
> >
> >
> > I'm not really a fan of this. Only supporting array textures was an intentional
> > design decision back when D3D11VA was defined, because it greatly
> > simplified the entire design - and as far as I know the d3d11va decoder, for
> > example, doesnt even support decoding into anything else.
> >
> > - Hendrik
>
> It's not like there would be a choice. The Intel MSDK uses an allocator mechanism
> and when it asks for a non-array DX11 texture it has to be given one.
>

Of course there is a choice. Only support the new stuff. After all, we
haven't supported it at all for years now, so only supporting it on
newer drivers isn't the end of the world.

- Hendrik
Hendrik Leppkes May 9, 2020, 7:18 a.m. UTC | #4
On Sat, May 9, 2020 at 9:07 AM Hendrik Leppkes <h.leppkes@gmail.com> wrote:
>
> On Sat, May 9, 2020 at 2:12 AM Soft Works <softworkz@hotmail.com> wrote:
> >
> > > From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> > > Hendrik Leppkes
> > > Sent: Friday, May 8, 2020 10:27 PM
> > > To: FFmpeg development discussions and patches <ffmpeg-
> > > devel@ffmpeg.org>
> > > Subject: Re: [FFmpeg-devel] [PATCH v4 03/11] libavutil/hwcontext_d3d11va:
> > > adding more texture information to the D3D11 hwcontext API
> > >
> > > On Fri, May 8, 2020 at 5:51 PM <artem.galin@gmail.com> wrote:
> > > >
> > > > From: Artem Galin <artem.galin@intel.com>
> > > >
> > > > Added AVD3D11FrameDescriptors array to store array of single textures
> > > > in case if there is no way to allocate array texture with BindFlags =
> > > D3D11_BIND_RENDER_TARGET.
> > > >
> > > > Signed-off-by: Artem Galin <artem.galin@intel.com>
> > > > ---
> > > >  libavutil/hwcontext_d3d11va.c | 26 ++++++++++++++++++++------
> > > > libavutil/hwcontext_d3d11va.h |  9 +++++++++
> > > >  2 files changed, 29 insertions(+), 6 deletions(-)
> > > >
> > > > diff --git a/libavutil/hwcontext_d3d11va.c
> > > > b/libavutil/hwcontext_d3d11va.c index c8ae58f908..cd80931dd3 100644
> > > > --- a/libavutil/hwcontext_d3d11va.c
> > > > +++ b/libavutil/hwcontext_d3d11va.c
> > > > @@ -72,8 +72,8 @@ static av_cold void load_functions(void)  }
> > > >
> > > >  typedef struct D3D11VAFramesContext {
> > > > -    int nb_surfaces_used;
> > > > -
> > > > +    size_t nb_surfaces;
> > > > +    size_t nb_surfaces_used;
> > > >      DXGI_FORMAT format;
> > > >
> > > >      ID3D11Texture2D *staging_texture; @@ -112,6 +112,8 @@ static void
> > > > d3d11va_frames_uninit(AVHWFramesContext *ctx)
> > > >      if (s->staging_texture)
> > > >          ID3D11Texture2D_Release(s->staging_texture);
> > > >      s->staging_texture = NULL;
> > > > +
> > > > +    av_freep(&frames_hwctx->texture_infos);
> > > >  }
> > > >
> > > >  static int d3d11va_frames_get_constraints(AVHWDeviceContext *ctx, @@
> > > > -152,8 +154,10 @@ static void free_texture(void *opaque, uint8_t *data)
> > > >      av_free(data);
> > > >  }
> > > >
> > > > -static AVBufferRef *wrap_texture_buf(ID3D11Texture2D *tex, int index)
> > > > +static AVBufferRef *wrap_texture_buf(AVHWFramesContext *ctx,
> > > > +ID3D11Texture2D *tex, int index)
> > > >  {
> > > > +    D3D11VAFramesContext              *s = ctx->internal->priv;
> > > > +    AVD3D11VAFramesContext *frames_hwctx = ctx->hwctx;
> > > >      AVBufferRef *buf;
> > > >      AVD3D11FrameDescriptor *desc = av_mallocz(sizeof(*desc));
> > > >      if (!desc) {
> > > > @@ -161,6 +165,10 @@ static AVBufferRef
> > > *wrap_texture_buf(ID3D11Texture2D *tex, int index)
> > > >          return NULL;
> > > >      }
> > > >
> > > > +    frames_hwctx->texture_infos[s->nb_surfaces_used].texture = tex;
> > > > +    frames_hwctx->texture_infos[s->nb_surfaces_used].index = index;
> > > > +    s->nb_surfaces_used++;
> > > > +
> > > >      desc->texture = tex;
> > > >      desc->index   = index;
> > > >
> > > > @@ -199,7 +207,7 @@ static AVBufferRef
> > > *d3d11va_alloc_single(AVHWFramesContext *ctx)
> > > >          return NULL;
> > > >      }
> > > >
> > > > -    return wrap_texture_buf(tex, 0);
> > > > +    return wrap_texture_buf(ctx, tex, 0);
> > > >  }
> > > >
> > > >  static AVBufferRef *d3d11va_pool_alloc(void *opaque, int size) @@
> > > > -220,7 +228,7 @@ static AVBufferRef *d3d11va_pool_alloc(void *opaque,
> > > int size)
> > > >      }
> > > >
> > > >      ID3D11Texture2D_AddRef(hwctx->texture);
> > > > -    return wrap_texture_buf(hwctx->texture, s->nb_surfaces_used++);
> > > > +    return wrap_texture_buf(ctx, hwctx->texture,
> > > > + s->nb_surfaces_used);
> > > >  }
> > > >
> > > >  static int d3d11va_frames_init(AVHWFramesContext *ctx) @@ -267,7
> > > > +275,7 @@ static int d3d11va_frames_init(AVHWFramesContext *ctx)
> > > >              av_log(ctx, AV_LOG_ERROR, "User-provided texture has
> > > mismatching parameters\n");
> > > >              return AVERROR(EINVAL);
> > > >          }
> > > > -    } else if (texDesc.ArraySize > 0) {
> > > > +    } else if (!(texDesc.BindFlags & D3D11_BIND_RENDER_TARGET) &&
> > > > + texDesc.ArraySize > 0) {
> > > >          hr = ID3D11Device_CreateTexture2D(device_hwctx->device,
> > > &texDesc, NULL, &hwctx->texture);
> > > >          if (FAILED(hr)) {
> > > >              av_log(ctx, AV_LOG_ERROR, "Could not create the texture
> > > > (%lx)\n", (long)hr); @@ -275,6 +283,12 @@ static int
> > > d3d11va_frames_init(AVHWFramesContext *ctx)
> > > >          }
> > > >      }
> > > >
> > > > +    hwctx->texture_infos = av_mallocz_array(ctx->initial_pool_size,
> > > sizeof(*hwctx->texture_infos));
> > > > +    if (!hwctx->texture_infos)
> > > > +        return AVERROR(ENOMEM);
> > > > +
> > > > +    s->nb_surfaces = ctx->initial_pool_size;
> > > > +
> > > >      ctx->internal->pool_internal =
> > > av_buffer_pool_init2(sizeof(AVD3D11FrameDescriptor),
> > > >                                                          ctx, d3d11va_pool_alloc, NULL);
> > > >      if (!ctx->internal->pool_internal) diff --git
> > > > a/libavutil/hwcontext_d3d11va.h b/libavutil/hwcontext_d3d11va.h index
> > > > 9f91e9b1b6..295bdcd90d 100644
> > > > --- a/libavutil/hwcontext_d3d11va.h
> > > > +++ b/libavutil/hwcontext_d3d11va.h
> > > > @@ -164,6 +164,15 @@ typedef struct AVD3D11VAFramesContext {
> > > >       * This field is ignored/invalid if a user-allocated texture is provided.
> > > >       */
> > > >      UINT MiscFlags;
> > > > +
> > > > +    /**
> > > > +     * In case if texture structure member above is not NULL contains the
> > > same texture
> > > > +     * pointer for all elements and different indexes into the array texture.
> > > > +     * In case if texture structure member above is NULL, all elements
> > > contains
> > > > +     * pointers to separate non-array textures and 0 indexes.
> > > > +     * This field is ignored/invalid if a user-allocated texture is provided.
> > > > +     */
> > > > +    AVD3D11FrameDescriptor *texture_infos;
> > > >  } AVD3D11VAFramesContext;
> > > >
> > >
> > >
> > > I'm not really a fan of this. Only supporting array textures was an intentional
> > > design decision back when D3D11VA was defined, because it greatly
> > > simplified the entire design - and as far as I know the d3d11va decoder, for
> > > example, doesnt even support decoding into anything else.
> > >
> > > - Hendrik
> >
> > It's not like there would be a choice. The Intel MSDK uses an allocator mechanism
> > and when it asks for a non-array DX11 texture it has to be given one.
> >
>
> Of course there is a choice. Only support the new stuff. Afterall we
> havent supported it at all for years now, so only supporting it on
> newer drivers isn't the end of the world.
>

To give an example for consistency:

d3d11va decoding will only ever decode into array textures. So when I
use d3d11va decoding, and then try to encode with qsvenc, it still
fails on such systems, right?
And not only that, it'll fail in mysterious ways.

When I'm decoding with qsvdec and it produces a list of textures, and
the API user does not handle them, since it's a new feature and an API
change, it'll break mysteriously again.

Adding a confusing alternate way to store textures in the context
seems less than ideal, even more so since it's not necessary for
up-to-date drivers. Let Intel document the exact driver requirements,
and check it, fallback to d3d9 otherwise? Seems like an overall much
neater solution.
Bending our API to the needs of legacy drivers seems like something
that will cause headaches for years to come, while said hardware will
slowly just go away.

- Hendrik
Max Dmitrichenko May 9, 2020, 11:39 a.m. UTC | #5
On Sat, May 9, 2020 at 9:18 AM Hendrik Leppkes <h.leppkes@gmail.com> wrote:

> On Sat, May 9, 2020 at 9:07 AM Hendrik Leppkes <h.leppkes@gmail.com>
> wrote:
> >
> > On Sat, May 9, 2020 at 2:12 AM Soft Works <softworkz@hotmail.com> wrote:
> > >
> > > > From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> > > > Hendrik Leppkes
> > > > Sent: Friday, May 8, 2020 10:27 PM
> > > > To: FFmpeg development discussions and patches <ffmpeg-
> > > > devel@ffmpeg.org>
> > > > Subject: Re: [FFmpeg-devel] [PATCH v4 03/11]
> libavutil/hwcontext_d3d11va:
> > > > adding more texture information to the D3D11 hwcontext API
> > > >
> > > > On Fri, May 8, 2020 at 5:51 PM <artem.galin@gmail.com> wrote:
> > > > >
> > > > > From: Artem Galin <artem.galin@intel.com>
> > > > >
> > > > > Added AVD3D11FrameDescriptors array to store array of single
> textures
> > > > > in case if there is no way to allocate array texture with
> BindFlags =
> > > > D3D11_BIND_RENDER_TARGET.
> > > > >
> > > > > Signed-off-by: Artem Galin <artem.galin@intel.com>
> > > > > ---
> > > > >  libavutil/hwcontext_d3d11va.c | 26 ++++++++++++++++++++------
> > > > > libavutil/hwcontext_d3d11va.h |  9 +++++++++
> > > > >  2 files changed, 29 insertions(+), 6 deletions(-)
> > > > >
> > > > > diff --git a/libavutil/hwcontext_d3d11va.c
> > > > > b/libavutil/hwcontext_d3d11va.c index c8ae58f908..cd80931dd3 100644
> > > > > --- a/libavutil/hwcontext_d3d11va.c
> > > > > +++ b/libavutil/hwcontext_d3d11va.c
> > > > > @@ -72,8 +72,8 @@ static av_cold void load_functions(void)  }
> > > > >
> > > > >  typedef struct D3D11VAFramesContext {
> > > > > -    int nb_surfaces_used;
> > > > > -
> > > > > +    size_t nb_surfaces;
> > > > > +    size_t nb_surfaces_used;
> > > > >      DXGI_FORMAT format;
> > > > >
> > > > >      ID3D11Texture2D *staging_texture; @@ -112,6 +112,8 @@ static
> void
> > > > > d3d11va_frames_uninit(AVHWFramesContext *ctx)
> > > > >      if (s->staging_texture)
> > > > >          ID3D11Texture2D_Release(s->staging_texture);
> > > > >      s->staging_texture = NULL;
> > > > > +
> > > > > +    av_freep(&frames_hwctx->texture_infos);
> > > > >  }
> > > > >
> > > > >  static int d3d11va_frames_get_constraints(AVHWDeviceContext *ctx,
> @@
> > > > > -152,8 +154,10 @@ static void free_texture(void *opaque, uint8_t
> *data)
> > > > >      av_free(data);
> > > > >  }
> > > > >
> > > > > -static AVBufferRef *wrap_texture_buf(ID3D11Texture2D *tex, int
> index)
> > > > > +static AVBufferRef *wrap_texture_buf(AVHWFramesContext *ctx,
> > > > > +ID3D11Texture2D *tex, int index)
> > > > >  {
> > > > > +    D3D11VAFramesContext              *s = ctx->internal->priv;
> > > > > +    AVD3D11VAFramesContext *frames_hwctx = ctx->hwctx;
> > > > >      AVBufferRef *buf;
> > > > >      AVD3D11FrameDescriptor *desc = av_mallocz(sizeof(*desc));
> > > > >      if (!desc) {
> > > > > @@ -161,6 +165,10 @@ static AVBufferRef
> > > > *wrap_texture_buf(ID3D11Texture2D *tex, int index)
> > > > >          return NULL;
> > > > >      }
> > > > >
> > > > > +    frames_hwctx->texture_infos[s->nb_surfaces_used].texture =
> tex;
> > > > > +    frames_hwctx->texture_infos[s->nb_surfaces_used].index =
> index;
> > > > > +    s->nb_surfaces_used++;
> > > > > +
> > > > >      desc->texture = tex;
> > > > >      desc->index   = index;
> > > > >
> > > > > @@ -199,7 +207,7 @@ static AVBufferRef
> > > > *d3d11va_alloc_single(AVHWFramesContext *ctx)
> > > > >          return NULL;
> > > > >      }
> > > > >
> > > > > -    return wrap_texture_buf(tex, 0);
> > > > > +    return wrap_texture_buf(ctx, tex, 0);
> > > > >  }
> > > > >
> > > > >  static AVBufferRef *d3d11va_pool_alloc(void *opaque, int size) @@
> > > > > -220,7 +228,7 @@ static AVBufferRef *d3d11va_pool_alloc(void
> *opaque,
> > > > int size)
> > > > >      }
> > > > >
> > > > >      ID3D11Texture2D_AddRef(hwctx->texture);
> > > > > -    return wrap_texture_buf(hwctx->texture,
> s->nb_surfaces_used++);
> > > > > +    return wrap_texture_buf(ctx, hwctx->texture,
> > > > > + s->nb_surfaces_used);
> > > > >  }
> > > > >
> > > > >  static int d3d11va_frames_init(AVHWFramesContext *ctx) @@ -267,7
> > > > > +275,7 @@ static int d3d11va_frames_init(AVHWFramesContext *ctx)
> > > > >              av_log(ctx, AV_LOG_ERROR, "User-provided texture has
> > > > mismatching parameters\n");
> > > > >              return AVERROR(EINVAL);
> > > > >          }
> > > > > -    } else if (texDesc.ArraySize > 0) {
> > > > > +    } else if (!(texDesc.BindFlags & D3D11_BIND_RENDER_TARGET) &&
> > > > > + texDesc.ArraySize > 0) {
> > > > >          hr = ID3D11Device_CreateTexture2D(device_hwctx->device,
> > > > &texDesc, NULL, &hwctx->texture);
> > > > >          if (FAILED(hr)) {
> > > > >              av_log(ctx, AV_LOG_ERROR, "Could not create the
> texture
> > > > > (%lx)\n", (long)hr); @@ -275,6 +283,12 @@ static int
> > > > d3d11va_frames_init(AVHWFramesContext *ctx)
> > > > >          }
> > > > >      }
> > > > >
> > > > > +    hwctx->texture_infos =
> av_mallocz_array(ctx->initial_pool_size,
> > > > sizeof(*hwctx->texture_infos));
> > > > > +    if (!hwctx->texture_infos)
> > > > > +        return AVERROR(ENOMEM);
> > > > > +
> > > > > +    s->nb_surfaces = ctx->initial_pool_size;
> > > > > +
> > > > >      ctx->internal->pool_internal =
> > > > av_buffer_pool_init2(sizeof(AVD3D11FrameDescriptor),
> > > > >                                                          ctx,
> d3d11va_pool_alloc, NULL);
> > > > >      if (!ctx->internal->pool_internal) diff --git
> > > > > a/libavutil/hwcontext_d3d11va.h b/libavutil/hwcontext_d3d11va.h
> index
> > > > > 9f91e9b1b6..295bdcd90d 100644
> > > > > --- a/libavutil/hwcontext_d3d11va.h
> > > > > +++ b/libavutil/hwcontext_d3d11va.h
> > > > > @@ -164,6 +164,15 @@ typedef struct AVD3D11VAFramesContext {
> > > > >       * This field is ignored/invalid if a user-allocated texture
> is provided.
> > > > >       */
> > > > >      UINT MiscFlags;
> > > > > +
> > > > > +    /**
> > > > > +     * In case if texture structure member above is not NULL
> contains the
> > > > same texture
> > > > > +     * pointer for all elements and different indexes into the
> array texture.
> > > > > +     * In case if texture structure member above is NULL, all
> elements
> > > > contains
> > > > > +     * pointers to separate non-array textures and 0 indexes.
> > > > > +     * This field is ignored/invalid if a user-allocated texture
> is provided.
> > > > > +     */
> > > > > +    AVD3D11FrameDescriptor *texture_infos;
> > > > >  } AVD3D11VAFramesContext;
> > > > >
> > > >
> > > >
> > > > I'm not really a fan of this. Only supporting array textures was an
> intentional
> > > > design decision back when D3D11VA was defined, because it greatly
> > > > simplified the entire design - and as far as I know the d3d11va
> decoder, for
> > > > example, doesnt even support decoding into anything else.
> > > >
> > > > - Hendrik
> > >
> > > It's not like there would be a choice. The Intel MSDK uses an
> allocator mechanism
> > > and when it asks for a non-array DX11 texture it has to be given one.
> > >
> >
> > Of course there is a choice. Only support the new stuff. Afterall we
> > havent supported it at all for years now, so only supporting it on
> > newer drivers isn't the end of the world.
> >
>
> To give an example for consistency:
>
> d3d11va decoding will only ever decode into array-textures.



The patch focuses on this case only, including encode
DCH driver supports such configuration back to 6th Gen Intel(R) Core(TM)
processor family (Codename Skylake)



> So when I
> use d3d11va decoding, and then try to encode with qsvenc, it still
> fails on such systems, right?
>


see above about driver version recommendations
and with latest - it is not expected to fail.



> And not only that, it'll fail in mysterious ways.
>
> When I'm decoding with qsvdec and it produces a list of textures, and
> the API user does not handle them, since its a new feature and a API
> change, it'll break mysteriously again.
>
> Adding a confusing alternate way to store textures in the context
> seems less then ideal, even more so since its not necessary for
> up-to-date drivers. Let Intel document the exact driver requirements,
> and check it, fallback to d3d9 otherwise? Seems like an overall much
> neater solution.
> Bending our API to the needs of legacy drivers seems like something
> that will cause headaches for years to come, while said hardware will
> slowly just go away.
>
>
DCH driver is available from
https://www.intel.com/content/www/us/en/support/articles/000031572/programs/intel-corporation.html
Common assumption here - to use latest driver available,
most likely, it is needless to document.

d3d9 path still available, in case of any problems around d3d11va

it is a bigger issue to completely abandon DX11 support with its advantages.



> - Hendrik
>



Question about array-textures: are there any confirmation that with any
BindFlags combination it is should be possible to create such texture?
Most importantly D3D11_BIND_RENDER_TARGET.

if this is cleared,  texture_infos can be removed.

Regards
Max
Hendrik Leppkes May 9, 2020, 1:17 p.m. UTC | #6
On Sat, May 9, 2020 at 2:11 PM Max Dmitrichenko <maxim.d33@gmail.com> wrote:
>
> Question about array-textures: are there any confirmation that with any
> BindFlags combination it is should be possible to create such texture?
> Most importantly D3D11_BIND_RENDER_TARGET.
>

More interestingly, is there any evidence that this is in fact not possible?
I see no mention of any limitations like that in the D3D11 documentation.

Where does this condition then come from?

- Hendrik
Mark Thompson May 9, 2020, 2:37 p.m. UTC | #7
On 08/05/2020 21:26, Hendrik Leppkes wrote:
> On Fri, May 8, 2020 at 5:51 PM <artem.galin@gmail.com> wrote:
>>
>> From: Artem Galin <artem.galin@intel.com>
>>
>> Added AVD3D11FrameDescriptors array to store array of single textures in case if there is no way
>> to allocate array texture with BindFlags = D3D11_BIND_RENDER_TARGET.
>>
>> Signed-off-by: Artem Galin <artem.galin@intel.com>
>> ---
>>  libavutil/hwcontext_d3d11va.c | 26 ++++++++++++++++++++------
>>  libavutil/hwcontext_d3d11va.h |  9 +++++++++
>>  2 files changed, 29 insertions(+), 6 deletions(-)
>>
>> ...
>> diff --git a/libavutil/hwcontext_d3d11va.h b/libavutil/hwcontext_d3d11va.h
>> index 9f91e9b1b6..295bdcd90d 100644
>> --- a/libavutil/hwcontext_d3d11va.h
>> +++ b/libavutil/hwcontext_d3d11va.h
>> @@ -164,6 +164,15 @@ typedef struct AVD3D11VAFramesContext {
>>       * This field is ignored/invalid if a user-allocated texture is provided.
>>       */
>>      UINT MiscFlags;
>> +
>> +    /**
>> +     * In case if texture structure member above is not NULL contains the same texture
>> +     * pointer for all elements and different indexes into the array texture.
>> +     * In case if texture structure member above is NULL, all elements contains
>> +     * pointers to separate non-array textures and 0 indexes.
>> +     * This field is ignored/invalid if a user-allocated texture is provided.
>> +     */
>> +    AVD3D11FrameDescriptor *texture_infos;
>>  } AVD3D11VAFramesContext;
>>
> 
> 
> I'm not really a fan of this. Only supporting array textures was an
> intentional design decision back when D3D11VA was defined, because it
> greatly simplified the entire design - and as far as I know the
> d3d11va decoder, for example, doesnt even support decoding into
> anything else.

For an decoder, yes, because the set of things to render to can easily be constrained.

For an encoder, you want to support more cases than just textures generated by a decoder, and ideally that would allow arbitrary textures with the right properties so that the encoder is not weirdly gimped (compare NVENC, which does accept any texture).  The barrier to making that work is this horrible texture preregistration requirement where we need to be able to find all of the textures which might be used up front, not the single/array texture difference.  While changing the API here is not fun, following the method used for the same problem with D3D9 surfaces seems like the simplest way to make it all work nicely.

Possibly I am not understanding something here, though - I don't see what this has to do with the setting of D3D11_BIND_RENDER_TARGET (and in particular why the code discards the array index if this flag is set).

- Mark
Max Dmitrichenko May 9, 2020, 5:18 p.m. UTC | #8
On Sat, May 9, 2020 at 3:18 PM Hendrik Leppkes <h.leppkes@gmail.com> wrote:

> On Sat, May 9, 2020 at 2:11 PM Max Dmitrichenko <maxim.d33@gmail.com>
> wrote:
> >
> > Question about array-textures: are there any confirmation that with any
> > BindFlags combination it is should be possible to create such texture?
> > Most importantly D3D11_BIND_RENDER_TARGET.
> >
>
> More interestingly, is there any evidence that this is in fact not
> possible?
> I see no mention of any limitations like that in the D3D11 documentation.
>
> Where does this condition then come from?
>
>
MSFT documentation is not share much details,
Try following https://pastebin.com/iCPrwUem
see #if condition for easy check

regards
Max
Hendrik Leppkes May 9, 2020, 5:40 p.m. UTC | #9
On Sat, May 9, 2020 at 5:09 PM Mark Thompson <sw@jkqxz.net> wrote:
>
> On 08/05/2020 21:26, Hendrik Leppkes wrote:
> > On Fri, May 8, 2020 at 5:51 PM <artem.galin@gmail.com> wrote:
> >>
> >> From: Artem Galin <artem.galin@intel.com>
> >>
> >> Added AVD3D11FrameDescriptors array to store array of single textures in case if there is no way
> >> to allocate array texture with BindFlags = D3D11_BIND_RENDER_TARGET.
> >>
> >> Signed-off-by: Artem Galin <artem.galin@intel.com>
> >> ---
> >>  libavutil/hwcontext_d3d11va.c | 26 ++++++++++++++++++++------
> >>  libavutil/hwcontext_d3d11va.h |  9 +++++++++
> >>  2 files changed, 29 insertions(+), 6 deletions(-)
> >>
> >> ...
> >> diff --git a/libavutil/hwcontext_d3d11va.h b/libavutil/hwcontext_d3d11va.h
> >> index 9f91e9b1b6..295bdcd90d 100644
> >> --- a/libavutil/hwcontext_d3d11va.h
> >> +++ b/libavutil/hwcontext_d3d11va.h
> >> @@ -164,6 +164,15 @@ typedef struct AVD3D11VAFramesContext {
> >>       * This field is ignored/invalid if a user-allocated texture is provided.
> >>       */
> >>      UINT MiscFlags;
> >> +
> >> +    /**
> >> +     * In case if texture structure member above is not NULL contains the same texture
> >> +     * pointer for all elements and different indexes into the array texture.
> >> +     * In case if texture structure member above is NULL, all elements contains
> >> +     * pointers to separate non-array textures and 0 indexes.
> >> +     * This field is ignored/invalid if a user-allocated texture is provided.
> >> +     */
> >> +    AVD3D11FrameDescriptor *texture_infos;
> >>  } AVD3D11VAFramesContext;
> >>
> >
> >
> > I'm not really a fan of this. Only supporting array textures was an
> > intentional design decision back when D3D11VA was defined, because it
> > greatly simplified the entire design - and as far as I know the
> > d3d11va decoder, for example, doesnt even support decoding into
> > anything else.
>
> For an decoder, yes, because the set of things to render to can easily be constrained.
>
> For an encoder, you want to support more cases then just textures generated by a decoder, and ideally that would allow arbitrary textures with the right properties so that the encoder is not weirdly gimped (compare NVENC, which does accept any texture).  The barrier to making that work is this horrible texture preregistration requirement where we need to be able to find all of the textures which might be used up front, not the single/array texture difference.  While changing the API here is not fun, following the method used for the same problem with D3D9 surfaces seems like the simplest way to make it all work nicely.
>

If that is the goal, wouldn't it be ideal for an encoder to work just
with a device context, and not require a frame context?

- Hendrik
Soft Works May 9, 2020, 5:41 p.m. UTC | #10
> -----Original Message-----
> From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> Hendrik Leppkes
> Sent: Saturday, May 9, 2020 9:08 AM
> To: FFmpeg development discussions and patches <ffmpeg-
> devel@ffmpeg.org>
> Subject: Re: [FFmpeg-devel] [PATCH v4 03/11] libavutil/hwcontext_d3d11va:
> adding more texture information to the D3D11 hwcontext API
> 

> > >
> > > I'm not really a fan of this. Only supporting array textures was an
> > > intentional design decision back when D3D11VA was defined, because
> > > it greatly simplified the entire design - and as far as I know the
> > > d3d11va decoder, for example, doesnt even support decoding into
> anything else.
> > >
> > > - Hendrik
> >
> > It's not like there would be a choice. The Intel MSDK uses an
> > allocator mechanism and when it asks for a non-array DX11 texture it has to
> be given one.
> >
> 
> Of course there is a choice. Only support the new stuff. Afterall we havent
> supported it at all for years now, so only supporting it on newer drivers isn't
> the end of the world.

It _IS_ the end of the world when at the same time, the default will be 
switched to DX11 because this will automatically create many cases 
where things will fail which have been working previously.

An automatic fallback is not a good alternative either because that would
break specific adapter selection by adapter number because the numbering
is different between D3D9 and DX11.

Assuming that everybody would have the latest driver is not matching the
situation in the real world. See here for an example: 
https://github.com/softworkz/ffmpeg_dx11/issues/1

The (similar) implementation that I have done has been running fine on thousands 
of installations for about a year now. 
With the currently proposed solution, we would be unable to ship the product.

Best regards,

softworkz
Hendrik Leppkes May 9, 2020, 5:48 p.m. UTC | #11
On Sat, May 9, 2020 at 7:18 PM Max Dmitrichenko <maxim.d33@gmail.com> wrote:
>
> On Sat, May 9, 2020 at 3:18 PM Hendrik Leppkes <h.leppkes@gmail.com> wrote:
>
> > On Sat, May 9, 2020 at 2:11 PM Max Dmitrichenko <maxim.d33@gmail.com>
> > wrote:
> > >
> > > Question about array-textures: are there any confirmation that with any
> > > BindFlags combination it is should be possible to create such texture?
> > > Most importantly D3D11_BIND_RENDER_TARGET.
> > >
> >
> > More interestingly, is there any evidence that this is in fact not
> > possible?
> > I see no mention of any limitations like that in the D3D11 documentation.
> >
> > Where does this condition then come from?
> >
> >
> MSFT documentation is not share much details,
> Try following https://pastebin.com/iCPrwUem
> see #if condition for easy check
>

Did you ever try reducing the number of bind flags?
Flagging it both for decoder and renderer usage at the same time seems
slightly overkill to me, afterall only one of those processes is going
to fill the texture, and you could (and maybe should) use a different
pool if you want renderer target textures, for eg. filtering?

- Hendrik
Hendrik Leppkes May 9, 2020, 5:53 p.m. UTC | #12
On Sat, May 9, 2020 at 7:41 PM Soft Works <softworkz@hotmail.com> wrote:
>
> > -----Original Message-----
> > From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> > Hendrik Leppkes
> > Sent: Saturday, May 9, 2020 9:08 AM
> > To: FFmpeg development discussions and patches <ffmpeg-
> > devel@ffmpeg.org>
> > Subject: Re: [FFmpeg-devel] [PATCH v4 03/11] libavutil/hwcontext_d3d11va:
> > adding more texture information to the D3D11 hwcontext API
> >
>
> > > >
> > > > I'm not really a fan of this. Only supporting array textures was an
> > > > intentional design decision back when D3D11VA was defined, because
> > > > it greatly simplified the entire design - and as far as I know the
> > > > d3d11va decoder, for example, doesnt even support decoding into
> > anything else.
> > > >
> > > > - Hendrik
> > >
> > > It's not like there would be a choice. The Intel MSDK uses an
> > > allocator mechanism and when it asks for a non-array DX11 texture it has to
> > be given one.
> > >
> >
> > Of course there is a choice. Only support the new stuff. Afterall we havent
> > supported it at all for years now, so only supporting it on newer drivers isn't
> > the end of the world.
>
> It _IS_ the end of the world when at the same time, the default will be
> switched to DX11 because this will automatically create many cases
> where things will fail which have been working previously.
>
> An automatic fallback is not a good alternative either because that would
> break specific adapter selection by adapter number because the numbering
> is different between D3D9 and DX11.
>
> Assuming that everybody would have the latest driver is not matching the
> situation in the real world. See here for an example:
> https://github.com/softworkz/ffmpeg_dx11/issues/1
>

According to your own documentation, even the proposed DX11 version
will still fail on various systems (only bullet point 3 is solved from
the list in issue 2, leaving 1 and 2)
So either you have a fallback, or DX11 should probably just not be the
default at all then.

- Hendrik
Soft Works May 9, 2020, 6:13 p.m. UTC | #13
> -----Original Message-----
> From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> Hendrik Leppkes
> > >
> >
> > Of course there is a choice. Only support the new stuff. Afterall we
> > havent supported it at all for years now, so only supporting it on
> > newer drivers isn't the end of the world.
> >
> 
> To give an example for consistency:
> 
> d3d11va decoding will only ever decode into array-textures. So when I use
> d3d11va decoding, and then try to encode with qsvenc, it still fails on such
> systems, right?
> And not only that, it'll fail in mysterious ways.
> 
> When I'm decoding with qsvdec and it produces a list of textures, and the API
> user does not handle them, since its a new feature and a API change, it'll
> break mysteriously again.

Nothing will break when ffmpeg supports non-array textures, neither expected
nor mysteriously.

> Adding a confusing alternate way to store textures in the context seems less
> then ideal, even more so since its not necessary for up-to-date drivers. Let

It's not a confusing alternate way; non-array textures are just an alternate way
that hasn't been implemented in the ffmpeg code so far, that's all.

If you don't like the way how it's done, here's an alternate way that is probably 
a bit cleaner but also more verbose: https://github.com/softworkz/ffmpeg_dx11/commit/c09cc37ce7f513717493e060df740aa0e7374257

(if you wonder about the disabled locking: it's not required at all - neither for Intel,
Nvidia or AMD D3D11VA decoders, we could discuss that separately if you wish)

Best regards,
softworkz
Max Dmitrichenko May 9, 2020, 6:28 p.m. UTC | #14
On Sat, May 9, 2020 at 7:48 PM Hendrik Leppkes <h.leppkes@gmail.com> wrote:

> On Sat, May 9, 2020 at 7:18 PM Max Dmitrichenko <maxim.d33@gmail.com>
> wrote:
> >
> > On Sat, May 9, 2020 at 3:18 PM Hendrik Leppkes <h.leppkes@gmail.com>
> wrote:
> >
> > > On Sat, May 9, 2020 at 2:11 PM Max Dmitrichenko <maxim.d33@gmail.com>
> > > wrote:
> > > >
> > > > Question about array-textures: are there any confirmation that with
> any
> > > > BindFlags combination it is should be possible to create such
> texture?
> > > > Most importantly D3D11_BIND_RENDER_TARGET.
> > > >
> > >
> > > More interestingly, is there any evidence that this is in fact not
> > > possible?
> > > I see no mention of any limitations like that in the D3D11
> documentation.
> > >
> > > Where does this condition then come from?
> > >
> > >
> > MSFT documentation is not share much details,
> > Try following https://pastebin.com/iCPrwUem
> > see #if condition for easy check
> >
>
> Did you ever try reducing the number of bind flags?
> Flagging it both for decoder and renderer usage at the same time seems
> slightly overkill to me, afterall only one of those processes is going
> to fill the texture, and you could (and maybe should) use a different
> pool if you want renderer target textures, for eg. filtering?
>
>
both flags: D3D11_BIND_DECODER ( output from the decoder API ) and
D3D11_BIND_VIDEO_ENCODER
(input from the video encoder API) work fine
for ArraySize > 1 /  array-textures
where more complex cases, like with D3D11_BIND_RENDER_TARGET, seem to be a
gap
with one texture in  AVHWFramesContext,

unless we have several  AVHWFramesContext,
is this a case?

regards
Max
Soft Works May 9, 2020, 6:31 p.m. UTC | #15
> -----Original Message-----
> From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> Hendrik Leppkes
> Sent: Saturday, May 9, 2020 7:54 PM
> To: FFmpeg development discussions and patches <ffmpeg-
> devel@ffmpeg.org>
> Subject: Re: [FFmpeg-devel] [PATCH v4 03/11] libavutil/hwcontext_d3d11va:
> adding more texture information to the D3D11 hwcontext API
> 
> On Sat, May 9, 2020 at 7:41 PM Soft Works <softworkz@hotmail.com> wrote:
> >
> > > -----Original Message-----
> > > From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> > > Hendrik Leppkes
> > > Sent: Saturday, May 9, 2020 9:08 AM
> > > To: FFmpeg development discussions and patches <ffmpeg-
> > > devel@ffmpeg.org>
> > > Subject: Re: [FFmpeg-devel] [PATCH v4 03/11]
> libavutil/hwcontext_d3d11va:
> > > adding more texture information to the D3D11 hwcontext API
> > >
> >
> > > > >
> > > > > I'm not really a fan of this. Only supporting array textures was
> > > > > an intentional design decision back when D3D11VA was defined,
> > > > > because it greatly simplified the entire design - and as far as
> > > > > I know the d3d11va decoder, for example, doesnt even support
> > > > > decoding into
> > > anything else.
> > > > >
> > > > > - Hendrik
> > > >
> > > > It's not like there would be a choice. The Intel MSDK uses an
> > > > allocator mechanism and when it asks for a non-array DX11 texture
> > > > it has to
> > > be given one.
> > > >
> > >
> > > Of course there is a choice. Only support the new stuff. Afterall we
> > > havent supported it at all for years now, so only supporting it on
> > > newer drivers isn't the end of the world.
> >
> > It _IS_ the end of the world when at the same time, the default will
> > be switched to DX11 because this will automatically create many cases
> > where things will fail which have been working previously.
> >
> > An automatic fallback is not a good alternative either because that
> > would break specific adapter selection by adapter number because the
> > numbering is different between D3D9 and DX11.
> >
> > Assuming that everybody would have the latest driver is not matching
> > the situation in the real world. See here for an example:
> > https://github.com/softworkz/ffmpeg_dx11/issues/1
> >
> 
> According to your own documentation, even the proposed DX11 version will
> still fail on various systems (only bullet point 3 is solved from the list in issue
> 2, leaving 1 and 2) So either you have a fallback, or DX11 should probably just
> not be the default at all then.

From a perspective of a normal ffmpeg command line user, my position is
that DX11 should not be made the default because it will break command 
lines that have been working before. (in a significant amount of cases)

Even more important, though: The behavior should be deterministic, which 
means that ffmpeg should not make an automatic decision (consider the 
device selection by adapter number).

Intel discrete graphic adapters are coming this year, so device selection 
will become even more important than before as you will be able to have
even multiple Intel graphics adapters.

Regarding your question above ("only the 3rd bullet point is addressed"): 
We implemented  a comprehensive device detection which gives us detailed
Information about all hardware devices, their drivers, supported codecs and
capabilities. 
This allows us to detect Intel adapters having drivers below MSDK version 1.21,
and for those we're using D3D9 only.

But for all other versions (MSDK 1.21 to 1.31), we _want_ to be able to use
DX11 because it allows using without a connected display and without an
active user session (e.g. Windows service). 

So, the impact is two-fold:
- We would be in the position to choose D3D9, but we "want" to have it
  working on all the other driver versions
- For a regular user, who cannot easily determine the MSDK version (or should
  not be needed to bother doing so), the resulting ffmpeg behavior would  
  be failure in a lot of cases

softworkz
Max Dmitrichenko May 9, 2020, 8:12 p.m. UTC | #16
On Sat, May 9, 2020 at 8:31 PM Soft Works <softworkz@hotmail.com> wrote:

> > -----Original Message-----
> > From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> > Hendrik Leppkes
> > Sent: Saturday, May 9, 2020 7:54 PM
> > To: FFmpeg development discussions and patches <ffmpeg-
> > devel@ffmpeg.org>
> > Subject: Re: [FFmpeg-devel] [PATCH v4 03/11] libavutil/hwcontext_d3d11va:
> > adding more texture information to the D3D11 hwcontext API
> >
> > On Sat, May 9, 2020 at 7:41 PM Soft Works <softworkz@hotmail.com> wrote:
> > >
> > > > -----Original Message-----
> > > > From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> > > > Hendrik Leppkes
> > > > Sent: Saturday, May 9, 2020 9:08 AM
> > > > To: FFmpeg development discussions and patches <ffmpeg-
> > > > devel@ffmpeg.org>
> > > > Subject: Re: [FFmpeg-devel] [PATCH v4 03/11]
> > libavutil/hwcontext_d3d11va:
> > > > adding more texture information to the D3D11 hwcontext API
> > > >
> > >
> > > > > >
> > > > > > I'm not really a fan of this. Only supporting array textures was
> > > > > > an intentional design decision back when D3D11VA was defined,
> > > > > > because it greatly simplified the entire design - and as far as
> > > > > > I know the d3d11va decoder, for example, doesnt even support
> > > > > > decoding into
> > > > anything else.
> > > > > >
> > > > > > - Hendrik
> > > > >
> > > > > It's not like there would be a choice. The Intel MSDK uses an
> > > > > allocator mechanism and when it asks for a non-array DX11 texture
> > > > > it has to
> > > > be given one.
> > > > >
> > > >
> > > > Of course there is a choice. Only support the new stuff. Afterall we
> > > > havent supported it at all for years now, so only supporting it on
> > > > newer drivers isn't the end of the world.
> > >
> > > It _IS_ the end of the world when at the same time, the default will
> > > be switched to DX11 because this will automatically create many cases
> > > where things will fail which have been working previously.
> > >
> > > An automatic fallback is not a good alternative either because that
> > > would break specific adapter selection by adapter number because the
> > > numbering is different between D3D9 and DX11.
> > >
> > > Assuming that everybody would have the latest driver is not matching
> > > the situation in the real world. See here for an example:
> > > https://github.com/softworkz/ffmpeg_dx11/issues/1
> > >
> >
> > According to your own documentation, even the proposed DX11 version will
> > still fail on various systems (only bullet point 3 is solved from the
> list in issue
> > 2, leaving 1 and 2) So either you have a fallback, or DX11 should
> probably just
> > not be the default at all then.
>
> From a perspective of a normal ffmpeg command line user, my position is
> that DX11 should not be made the default because it will break command
> lines that have been working before. (in a significant amount of cases)
>
>
This switch is documented, and the reasons for it are explained.
A normal ffmpeg command-line user has no particular association between a
headless HW setup and DX11.
Moreover, as stated below, setups with discrete graphics adapters will benefit
from defaulting to DX11.

It has to be a reasonable decision, rather than one focused purely on
supporting somewhat aged HW,
i.e. hardware before the 6th Gen Intel(R) Core(TM) processor family (codename Skylake).


> Even more important, though: The behavior should be deterministic, which
> means that ffmpeg should not make an automatic decision (consider the
> device selection by adapter number).
>
>
Behavior stays deterministic, with an option for explicit selection of the DX
version.
Also, no removal of DX9 support is being considered.


> Intel discrete graphic adapters are coming this year, so device selection
> will become even more important than before as you will be able to have
> even multiple Intel graphics adapters.
>
> Regarding your question above ("only the 3rd bullet point is addressed"):
> We implemented  a comprehensive device detection which gives us detailed
> Information about all hardware devices, their drivers, supported codecs and
> capabilities.
> This allows us to detect Intel adapters having drivers below MSDK version
> 1.21,
> and for those we're using D3D9 only.
>
> But for all other versions (MSDK 1.21 to 1.31), we _want_ to be able to use
> DX11 because it allows using without a connected display and without an
> active user session (e.g. Windows service).
>
>
are there prepared patch to consider?


> So, the impact is two-fold:
> - We would be in the position to choose D3D9, but we "want" to have it
>   working on all the other driver versions
> - For a regular user, who cannot easily determine the MSDK version (or
> should
>   not be needed to bother doing so), the resulting ffmpeg behavior would
>   be failure in a lot of cases
>
> softworkz
>
> _______________________________________________
> ffmpeg-devel mailing list
> ffmpeg-devel@ffmpeg.org
> https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
>
> To unsubscribe, visit link above, or email
> ffmpeg-devel-request@ffmpeg.org with subject "unsubscribe".



regards
Max
Max Dmitrichenko May 9, 2020, 8:15 p.m. UTC | #17
On Sat, May 9, 2020 at 8:13 PM Soft Works <softworkz@hotmail.com> wrote:

> > -----Original Message-----
> > From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> > Hendrik Leppkes
> > > >
> > >
> > > Of course there is a choice. Only support the new stuff. Afterall we
> > > havent supported it at all for years now, so only supporting it on
> > > newer drivers isn't the end of the world.
> > >
> >
> > To give an example for consistency:
> >
> > d3d11va decoding will only ever decode into array-textures. So when I use
> > d3d11va decoding, and then try to encode with qsvenc, it still fails on
> such
> > systems, right?
> > And not only that, it'll fail in mysterious ways.
> >
> > When I'm decoding with qsvdec and it produces a list of textures, and
> the API
> > user does not handle them, since its a new feature and a API change,
> it'll
> > break mysteriously again.
>
> Nothing will break when ffmpeg supports non-array textures, neither
> expected
> nor mysteriously.
>
>
it is not matter of this patches list,
can it be discussed in other patch review thread?


> > Adding a confusing alternate way to store textures in the context seems
> less
> > then ideal, even more so since its not necessary for up-to-date drivers.
> Let
>
> It's not a confusing alternate way; non-array textures are just an
> alternate way
> that hasn't been implemented in the ffmpeg code so far, that's all.
>
> If you don't like the way how it's done, here's an alternate way that is
> probably
> a bit cleaner but also more verbose:
> https://github.com/softworkz/ffmpeg_dx11/commit/c09cc37ce7f513717493e060df740aa0e7374257
>
> (if you wonder about the disabled locking: it's not required at all -
> neither for Intel,
> Nvidia or AMD D3D11VA decoders, we could discuss that separately if you
> wish)
>
> Best regards,
> softworkz
>
> _______________________________________________
> ffmpeg-devel mailing list
> ffmpeg-devel@ffmpeg.org
> https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
>
> To unsubscribe, visit link above, or email
> ffmpeg-devel-request@ffmpeg.org with subject "unsubscribe".


regards
Max
Soft Works May 9, 2020, 8:25 p.m. UTC | #18
> -----Original Message-----
> From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> Max Dmitrichenko
> Sent: Saturday, May 9, 2020 10:13 PM
> To: FFmpeg development discussions and patches <ffmpeg-
> devel@ffmpeg.org>
> Subject: Re: [FFmpeg-devel] [PATCH v4 03/11] libavutil/hwcontext_d3d11va:
> adding more texture information to the D3D11 hwcontext API
> 
> On Sat, May 9, 2020 at 8:31 PM Soft Works <softworkz@hotmail.com> wrote:
> 
> > > -----Original Message-----
> > > From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> > > Hendrik Leppkes
> > > Sent: Saturday, May 9, 2020 7:54 PM
> > > To: FFmpeg development discussions and patches <ffmpeg-
> > > devel@ffmpeg.org>
> > > Subject: Re: [FFmpeg-devel] [PATCH v4 03/11]
> libavutil/hwcontext_d3d11va:
> > > adding more texture information to the D3D11 hwcontext API
> > >
> > > On Sat, May 9, 2020 at 7:41 PM Soft Works <softworkz@hotmail.com>
> wrote:
> > > >
> > > > > -----Original Message-----
> > > > > From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On
> Behalf
> > > > > Of Hendrik Leppkes
> > > > > Sent: Saturday, May 9, 2020 9:08 AM
> > > > > To: FFmpeg development discussions and patches <ffmpeg-
> > > > > devel@ffmpeg.org>
> > > > > Subject: Re: [FFmpeg-devel] [PATCH v4 03/11]
> > > libavutil/hwcontext_d3d11va:
> > > > > adding more texture information to the D3D11 hwcontext API
> > > > >
> > > >
> > > > > > >
> > > > > > > I'm not really a fan of this. Only supporting array textures
> > > > > > > was an intentional design decision back when D3D11VA was
> > > > > > > defined, because it greatly simplified the entire design -
> > > > > > > and as far as I know the d3d11va decoder, for example,
> > > > > > > doesnt even support decoding into
> > > > > anything else.
> > > > > > >
> > > > > > > - Hendrik
> > > > > >
> > > > > > It's not like there would be a choice. The Intel MSDK uses an
> > > > > > allocator mechanism and when it asks for a non-array DX11
> > > > > > texture it has to
> > > > > be given one.
> > > > > >
> > > > >
> > > > > Of course there is a choice. Only support the new stuff.
> > > > > Afterall we havent supported it at all for years now, so only
> > > > > supporting it on newer drivers isn't the end of the world.
> > > >
> > > > It _IS_ the end of the world when at the same time, the default
> > > > will be switched to DX11 because this will automatically create
> > > > many cases where things will fail which have been working previously.
> > > >
> > > > An automatic fallback is not a good alternative either because
> > > > that would break specific adapter selection by adapter number
> > > > because the numbering is different between D3D9 and DX11.
> > > >
> > > > Assuming that everybody would have the latest driver is not
> > > > matching the situation in the real world. See here for an example:
> > > > https://github.com/softworkz/ffmpeg_dx11/issues/1
> > > >
> > >
> > > According to your own documentation, even the proposed DX11 version
> > > will still fail on various systems (only bullet point 3 is solved
> > > from the
> > list in issue
> > > 2, leaving 1 and 2) So either you have a fallback, or DX11 should
> > probably just
> > > not be the default at all then.
> >
> > From a perspective of a normal ffmpeg command line user, my position
> > is that DX11 should not be made the default because it will break
> > command lines that have been working before. (in a significant amount
> > of cases)
> >
> >
> This switch is documented and explained with reasons.
> a normal ffmpeg command line user has no association like with headless HW
> setup and DX11.

You can document that wherever you want, but for existing users, 
command lines that have always been working before will stop working
in a significant number of cases.

> > Even more important, though: The behavior should be deterministic,
> > which means that ffmpeg should not make an automatic decision
> > (consider the device selection by adapter number).
> >
> >
> behavior stays deterministic with option for explicit selection of DX version.
> also, no drop of DX9 is considered.

That was primarily a response to Hendrik's thought about automatically
falling back to D3D9.

But still, even with your patch: What will happen when DX11 is not available?

Will it 

1. fail?
2. or will it use D3D9 instead?

In case of 2, it's non-deterministic, and when the user has specified an 
adapter number for DX11, it might use the wrong adapter when ffmpeg 
uses D3D9 instead (because D3D9 and DX11 are counting adapters differently)

Regards,
softworkz
Soft Works May 9, 2020, 8:31 p.m. UTC | #19
> -----Original Message-----
> From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> Max Dmitrichenko
> Sent: Saturday, May 9, 2020 10:16 PM
> To: FFmpeg development discussions and patches <ffmpeg-
> devel@ffmpeg.org>
> Subject: Re: [FFmpeg-devel] [PATCH v4 03/11] libavutil/hwcontext_d3d11va:
> adding more texture information to the D3D11 hwcontext API
> 
> On Sat, May 9, 2020 at 8:13 PM Soft Works <softworkz@hotmail.com> wrote:
> 
> > > -----Original Message-----
> > > From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> > > Hendrik Leppkes
> > > > >
> > > >
> > > > Of course there is a choice. Only support the new stuff. Afterall
> > > > we havent supported it at all for years now, so only supporting it
> > > > on newer drivers isn't the end of the world.
> > > >
> > >
> > > To give an example for consistency:
> > >
> > > d3d11va decoding will only ever decode into array-textures. So when
> > > I use d3d11va decoding, and then try to encode with qsvenc, it still
> > > fails on
> > such
> > > systems, right?
> > > And not only that, it'll fail in mysterious ways.
> > >
> > > When I'm decoding with qsvdec and it produces a list of textures,
> > > and
> > the API
> > > user does not handle them, since its a new feature and a API change,
> > it'll
> > > break mysteriously again.
> >
> > Nothing will break when ffmpeg supports non-array textures, neither
> > expected nor mysteriously.
> >
> >
> it is not matter of this patches list,
> can it be discussed in other patch review thread?

It is very relevant to this patch set, because supporting non-array textures
or not has a big impact on the number of cases that your patch will break.

Making DX11 the default AND not making sure that command lines will 
keep working widely (with driver versions as are deployed in the real world
and not just the very latest ones), that are two things that do not 
go together very well, IMHO.

Regards,
softworkz
Max Dmitrichenko May 9, 2020, 8:41 p.m. UTC | #20
On Sat, May 9, 2020 at 10:26 PM Soft Works <softworkz@hotmail.com> wrote:

> > -----Original Message-----
> > From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> > Max Dmitrichenko
> > Sent: Saturday, May 9, 2020 10:13 PM
> > To: FFmpeg development discussions and patches <ffmpeg-
> > devel@ffmpeg.org>
> > Subject: Re: [FFmpeg-devel] [PATCH v4 03/11] libavutil/hwcontext_d3d11va:
> > adding more texture information to the D3D11 hwcontext API
> >
> > On Sat, May 9, 2020 at 8:31 PM Soft Works <softworkz@hotmail.com> wrote:
> >
> > > > -----Original Message-----
> > > > From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> > > > Hendrik Leppkes
> > > > Sent: Saturday, May 9, 2020 7:54 PM
> > > > To: FFmpeg development discussions and patches <ffmpeg-
> > > > devel@ffmpeg.org>
> > > > Subject: Re: [FFmpeg-devel] [PATCH v4 03/11]
> > libavutil/hwcontext_d3d11va:
> > > > adding more texture information to the D3D11 hwcontext API
> > > >
> > > > On Sat, May 9, 2020 at 7:41 PM Soft Works <softworkz@hotmail.com>
> > wrote:
> > > > >
> > > > > > -----Original Message-----
> > > > > > From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On
> > Behalf
> > > > > > Of Hendrik Leppkes
> > > > > > Sent: Saturday, May 9, 2020 9:08 AM
> > > > > > To: FFmpeg development discussions and patches <ffmpeg-
> > > > > > devel@ffmpeg.org>
> > > > > > Subject: Re: [FFmpeg-devel] [PATCH v4 03/11]
> > > > libavutil/hwcontext_d3d11va:
> > > > > > adding more texture information to the D3D11 hwcontext API
> > > > > >
> > > > >
> > > > > > > >
> > > > > > > > I'm not really a fan of this. Only supporting array textures
> > > > > > > > was an intentional design decision back when D3D11VA was
> > > > > > > > defined, because it greatly simplified the entire design -
> > > > > > > > and as far as I know the d3d11va decoder, for example,
> > > > > > > > doesnt even support decoding into
> > > > > > anything else.
> > > > > > > >
> > > > > > > > - Hendrik
> > > > > > >
> > > > > > > It's not like there would be a choice. The Intel MSDK uses an
> > > > > > > allocator mechanism and when it asks for a non-array DX11
> > > > > > > texture it has to
> > > > > > be given one.
> > > > > > >
> > > > > >
> > > > > > Of course there is a choice. Only support the new stuff.
> > > > > > Afterall we havent supported it at all for years now, so only
> > > > > > supporting it on newer drivers isn't the end of the world.
> > > > >
> > > > > It _IS_ the end of the world when at the same time, the default
> > > > > will be switched to DX11 because this will automatically create
> > > > > many cases where things will fail which have been working
> previously.
> > > > >
> > > > > An automatic fallback is not a good alternative either because
> > > > > that would break specific adapter selection by adapter number
> > > > > because the numbering is different between D3D9 and DX11.
> > > > >
> > > > > Assuming that everybody would have the latest driver is not
> > > > > matching the situation in the real world. See here for an example:
> > > > > https://github.com/softworkz/ffmpeg_dx11/issues/1
> > > > >
> > > >
> > > > According to your own documentation, even the proposed DX11 version
> > > > will still fail on various systems (only bullet point 3 is solved
> > > > from the
> > > list in issue
> > > > 2, leaving 1 and 2) So either you have a fallback, or DX11 should
> > > probably just
> > > > not be the default at all then.
> > >
> > > From a perspective of a normal ffmpeg command line user, my position
> > > is that DX11 should not be made the default because it will break
> > > command lines that have been working before. (in a significant amount
> > > of cases)
> > >
> > >
> > This switch is documented and explained with reasons.
> > a normal ffmpeg command line user has no association like with headless
> HW
> > setup and DX11.
>
> You can document that wherever you want, but for existing users,
> command lines that have always been working before will stop working
> in a significant number of cases.
>
>
Not sure where this argument leads: if a user upgrades to a new version, there
are new aspects to consider.


> > > Even more important, though: The behavior should be deterministic,
> > > which means that ffmpeg should not make an automatic decision
> > > (consider the device selection by adapter number).
> > >
> > >
> > behavior stays deterministic with option for explicit selection of DX
> version.
> > also, no drop of DX9 is considered.
>
> That was primarily a response to Hendrik's thought about automatically
> falling back to D3D9.
>
> But still, even with your patch: What will happen when DX11 is not
> available?
>
> Will it
>
> 1. fail?
> 2. or will it use D3D9 instead?
>
>
there should be no secrets here - implementation is available,
do you see fallback implementation?

we shouldn't forget that target app that uses FFMPEG can have the own
preferences
and configuration,
with this patch - it is possible to use DX11, unlike before.


> In case of 2, it's non-deterministic, and when the user has specified an
> adapter number for DX11, it might use the wrong adapter when ffmpeg
> uses D3D9 instead (because D3D9 and DX11 are counting adapters differently)
>
> Regards,
> softworkz
> _______________________________________________
>
>

regards
Max
Max Dmitrichenko May 9, 2020, 8:52 p.m. UTC | #21
On Sat, May 9, 2020 at 10:32 PM Soft Works <softworkz@hotmail.com> wrote:

> > -----Original Message-----
> > From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> > Max Dmitrichenko
> > Sent: Saturday, May 9, 2020 10:16 PM
> > To: FFmpeg development discussions and patches <ffmpeg-
> > devel@ffmpeg.org>
> > Subject: Re: [FFmpeg-devel] [PATCH v4 03/11] libavutil/hwcontext_d3d11va:
> > adding more texture information to the D3D11 hwcontext API
> >
> > On Sat, May 9, 2020 at 8:13 PM Soft Works <softworkz@hotmail.com> wrote:
> >
> > > > -----Original Message-----
> > > > From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> > > > Hendrik Leppkes
> > > > > >
> > > > >
> > > > > Of course there is a choice. Only support the new stuff. Afterall
> > > > > we havent supported it at all for years now, so only supporting it
> > > > > on newer drivers isn't the end of the world.
> > > > >
> > > >
> > > > To give an example for consistency:
> > > >
> > > > d3d11va decoding will only ever decode into array-textures. So when
> > > > I use d3d11va decoding, and then try to encode with qsvenc, it still
> > > > fails on
> > > such
> > > > systems, right?
> > > > And not only that, it'll fail in mysterious ways.
> > > >
> > > > When I'm decoding with qsvdec and it produces a list of textures,
> > > > and
> > > the API
> > > > user does not handle them, since its a new feature and a API change,
> > > it'll
> > > > break mysteriously again.
> > >
> > > Nothing will break when ffmpeg supports non-array textures, neither
> > > expected nor mysteriously.
> > >
> > >
> > it is not matter of this patches list,
> > can it be discussed in other patch review thread?
>
> It is very relevant to this patch set, because supporting non-array
> textures
> or not has a big impact on the number of cases that your patch will break.
>
>
This patch does not offer "non-array textures" support for DX11
decode/encode.
Wouldn't it be more reasonable to discuss that topic in the review thread of a
"non-array textures" patch?


> Making DX11 the default AND not making sure that command lines will
> keep working widely (with driver versions as are deployed in the real world
> and not just the very latest ones), that are two things that do not
> go together very well, IMHO.
>
> Regards,
> softworkz
>
>
>
regards
Max
Max Dmitrichenko May 9, 2020, 8:58 p.m. UTC | #22
On Sat, May 9, 2020 at 5:09 PM Mark Thompson <sw@jkqxz.net> wrote:

> On 08/05/2020 21:26, Hendrik Leppkes wrote:
> > On Fri, May 8, 2020 at 5:51 PM <artem.galin@gmail.com> wrote:
> >>
> >> From: Artem Galin <artem.galin@intel.com>
> >>
> >> Added AVD3D11FrameDescriptors array to store array of single textures
> in case if there is no way
> >> to allocate array texture with BindFlags = D3D11_BIND_RENDER_TARGET.
> >>
> >> Signed-off-by: Artem Galin <artem.galin@intel.com>
> >> ---
> >>  libavutil/hwcontext_d3d11va.c | 26 ++++++++++++++++++++------
> >>  libavutil/hwcontext_d3d11va.h |  9 +++++++++
> >>  2 files changed, 29 insertions(+), 6 deletions(-)
> >>
> >> ...
> >> diff --git a/libavutil/hwcontext_d3d11va.h
> b/libavutil/hwcontext_d3d11va.h
> >> index 9f91e9b1b6..295bdcd90d 100644
> >> --- a/libavutil/hwcontext_d3d11va.h
> >> +++ b/libavutil/hwcontext_d3d11va.h
> >> @@ -164,6 +164,15 @@ typedef struct AVD3D11VAFramesContext {
> >>       * This field is ignored/invalid if a user-allocated texture is
> provided.
> >>       */
> >>      UINT MiscFlags;
> >> +
> >> +    /**
> >> +     * In case if texture structure member above is not NULL contains
> the same texture
> >> +     * pointer for all elements and different indexes into the array
> texture.
> >> +     * In case if texture structure member above is NULL, all elements
> contains
> >> +     * pointers to separate non-array textures and 0 indexes.
> >> +     * This field is ignored/invalid if a user-allocated texture is
> provided.
> >> +     */
> >> +    AVD3D11FrameDescriptor *texture_infos;
> >>  } AVD3D11VAFramesContext;
> >>
> >
> >
> > I'm not really a fan of this. Only supporting array textures was an
> > intentional design decision back when D3D11VA was defined, because it
> > greatly simplified the entire design - and as far as I know the
> > d3d11va decoder, for example, doesnt even support decoding into
> > anything else.
>
> For an decoder, yes, because the set of things to render to can easily be
> constrained.
>
> For an encoder, you want to support more cases then just textures
> generated by a decoder, and ideally that would allow arbitrary textures
> with the right properties so that the encoder is not weirdly gimped
> (compare NVENC, which does accept any texture).  The barrier to making that
> work is this horrible texture preregistration requirement where we need to
> be able to find all of the textures which might be used up front, not the
> single/array texture difference.  While changing the API here is not fun,
> following the method used for the same problem with D3D9 surfaces seems
> like the simplest way to make it all work nicely.
>
> Possibly I am not understanding something here, though - I don't see what
> this has to do with the setting of D3D11_BIND_RENDER_TARGET (and in
> particular why the code discards the array index if this flag is set).
>
>
D3D11_BIND_RENDER_TARGET is not required for decode/encode, but for the
later filters/VPP stage:
https://github.com/Intel-Media-SDK/MediaSDK/blob/9890dff6064a7f8fe738899fc0c39b33a2d181b5/samples/sample_common/src/d3d11_allocator.cpp#L461

I am re-checking if this is really a MUST condition by any of the
requirements

regards
Max
Soft Works May 9, 2020, 9:29 p.m. UTC | #23
> -----Original Message-----
> From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> Max Dmitrichenko
> Sent: Saturday, May 9, 2020 10:42 PM
> To: FFmpeg development discussions and patches <ffmpeg-
> devel@ffmpeg.org>
> Subject: Re: [FFmpeg-devel] [PATCH v4 03/11] libavutil/hwcontext_d3d11va:
> adding more texture information to the D3D11 hwcontext API
> 
> > You can document that wherever you want, but for existing users,
> > command lines that have always been working before will stop working
> > in a significant number of cases.
> >
> >
> not sure where it goes:  if a user uses new version - there are new aspects to
> consider

Is that so?  Whether or not that is acceptable for ffmpeg is not up to me 
to decide, though. I'm just trying to illustrate the consequences.


> > But still, even with your patch: What will happen when DX11 is not
> > available?
> >
> > Will it
> >
> > 1. fail?
> > 2. or will it use D3D9 instead?
> >
> >
> there should be no secrets here - implementation is available, do you see
> fallback implementation?

Yes, I'm seeing that indeed:

Mark Thompson asked:
>> > +    mfxIMPL          impl = MFX_IMPL_AUTO_ANY | MFX_IMPL_VIA_D3D11;
>>
>> Does that still do the right thing on systems where only D3D9 works?

You replied:
> yes, it is only a hint for D11 when possible.

I would understand 'when possible' in a way that D3D9 would be used
'when not possible'.

If that's true, it means, that an ffmpeg user executing a command line 
cannot be sure whether D3D9 or DX11 will be used. That would mean
that the behavior is not deterministic.

Regards,
softworkz
Artem Galin May 9, 2020, 11:18 p.m. UTC | #24
On Sat, 9 May 2020 at 22:29, Soft Works <softworkz@hotmail.com> wrote:

> > -----Original Message-----
> > From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> > Max Dmitrichenko
> > Sent: Saturday, May 9, 2020 10:42 PM
> > To: FFmpeg development discussions and patches <ffmpeg-
> > devel@ffmpeg.org>
> > Subject: Re: [FFmpeg-devel] [PATCH v4 03/11] libavutil/hwcontext_d3d11va:
> > adding more texture information to the D3D11 hwcontext API
> >
> > > You can document that wherever you want, but for existing users,
> > > command lines that have always been working before will stop working
> > > in a significant number of cases.
> > >
> > >
> > not sure where it goes:  if a user uses new version - there are new
> aspects to
> > consider
>
> Is that so?  Whether or not that is acceptable for ffmpeg is not up to me
> to decide, though. I'm just trying to illustrate the consequences.
>
>
> > > But still, even with your patch: What will happen when DX11 is not
> > > available?
> > >
> > > Will it
> > >
>
Please read the patch

> > > 1. fail?
>
Yes

> > > 2. or will it use D3D9 instead?
>
No automatic fallback to D3D9, D3D9 is selected by explicit command line
only

> > >
> > >
> > there should be no secrets here - implementation is available, do you see
> > fallback implementation?
>
> Yes, I'm seeing that indeed:
>
> Mark Thompson asked:
> >> > +    mfxIMPL          impl = MFX_IMPL_AUTO_ANY | MFX_IMPL_VIA_D3D11;
> >>
> >> Does that still do the right thing on systems where only D3D9 works?
>
yes, it works with DX9. This flag is mandatory only for DX11.

>
> You replied:
> > yes, it is only a hint for D11 when possible.
>
> I would understand 'when possible' in a way that D3D9 would be used
> 'when not possible'.
>
> If that's true, it means, that an ffmpeg user executing a command line
> cannot be sure whether D3D9 or DX11 will be used. That would mean
> that the behavior is not deterministic.
>
This is not true; those are your assumptions.
The behavior is deterministic:
DX11 by default;
DX9 only if the user selects it explicitly via the command line.

>
> Regards,
> softworkz
>
>
>
> _______________________________________________
> ffmpeg-devel mailing list
> ffmpeg-devel@ffmpeg.org
> https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
>
> To unsubscribe, visit link above, or email
> ffmpeg-devel-request@ffmpeg.org with subject "unsubscribe".


Regards,
Artem.
Soft Works May 9, 2020, 11:55 p.m. UTC | #25
> -----Original Message-----
> From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> Artem Galin
> Sent: Sunday, May 10, 2020 1:19 AM
> To: FFmpeg development discussions and patches <ffmpeg-
> devel@ffmpeg.org>
> Subject: Re: [FFmpeg-devel] [PATCH v4 03/11] libavutil/hwcontext_d3d11va:
> adding more texture information to the D3D11 hwcontext API
> 
> On Sat, 9 May 2020 at 22:29, Soft Works <softworkz@hotmail.com> wrote:
> 

> > You replied:
> > > yes, it is only a hint for D11 when possible.
> >
> > I would understand 'when possible' in a way that D3D9 would be used
> > 'when not possible'.
> >
> > If that's true, it means, that an ffmpeg user executing a command line
> > cannot be sure whether D3D9 or DX11 will be used. That would mean that
> > the behavior is not deterministic.
> >
> This is not true, it is your assumptions.
> Behavior is deterministic:
> DX11 by default.
> DX9 if user selects it explicitly via command line.

> > > > But still, even with your patch: What will happen when DX11 is not
> > > > available?
> > > >
> > > > Will it
> > > >
> >
> Please read the patch
> 
> > > > 1. fail?
> >

> Yes


Thanks for confirming this. Deterministic behavior instead of auto-selection 
Is important and should not be changed.

But on the other side, it needs to be clear to everybody, that this once again 
adds even more to the list of situations that the patch will break: In this case, 
it's all Windows versions where DX11 is not available.

Best regards,
softworkz
Artem Galin May 23, 2020, 3:17 p.m. UTC | #26
New version of patch is available by link
https://patchwork.ffmpeg.org/project/ffmpeg/list/?series=1253

On Sun, 10 May 2020 at 00:55, Soft Works <softworkz@hotmail.com> wrote:

> > -----Original Message-----
> > From: ffmpeg-devel <ffmpeg-devel-bounces@ffmpeg.org> On Behalf Of
> > Artem Galin
> > Sent: Sunday, May 10, 2020 1:19 AM
> > To: FFmpeg development discussions and patches <ffmpeg-
> > devel@ffmpeg.org>
> > Subject: Re: [FFmpeg-devel] [PATCH v4 03/11] libavutil/hwcontext_d3d11va:
> > adding more texture information to the D3D11 hwcontext API
> >
> > On Sat, 9 May 2020 at 22:29, Soft Works <softworkz@hotmail.com> wrote:
> >
>
> > > You replied:
> > > > yes, it is only a hint for D11 when possible.
> > >
> > > I would understand 'when possible' in a way that D3D9 would be used
> > > 'when not possible'.
> > >
> > > If that's true, it means, that an ffmpeg user executing a command line
> > > cannot be sure whether D3D9 or DX11 will be used. That would mean that
> > > the behavior is not deterministic.
> > >
> > This is not true, it is your assumptions.
> > Behavior is deterministic:
> > DX11 by default.
> > DX9 if user selects it explicitly via command line.
>
> > > > > But still, even with your patch: What will happen when DX11 is not
> > > > > available?
> > > > >
> > > > > Will it
> > > > >
> > >
> > Please read the patch
> >
> > > > > 1. fail?
> > >
>
> > Yes
>
>
> Thanks for confirming this. Deterministic behavior instead of
> auto-selection
> Is important and should not be changed.
>
> But on the other side, it needs to be clear to everybody, that this once
> again
> adds even more to the list of situations that the patch will break: In
> this case,
> it's all Windows versions where DX11 is not available.
>
> Best regards,
> softworkz
>
> _______________________________________________
> ffmpeg-devel mailing list
> ffmpeg-devel@ffmpeg.org
> https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
>
> To unsubscribe, visit link above, or email
> ffmpeg-devel-request@ffmpeg.org with subject "unsubscribe".
diff mbox series

Patch

diff --git a/libavutil/hwcontext_d3d11va.c b/libavutil/hwcontext_d3d11va.c
index c8ae58f908..cd80931dd3 100644
--- a/libavutil/hwcontext_d3d11va.c
+++ b/libavutil/hwcontext_d3d11va.c
@@ -72,8 +72,8 @@  static av_cold void load_functions(void)
 }
 
 typedef struct D3D11VAFramesContext {
-    int nb_surfaces_used;
-
+    size_t nb_surfaces;
+    size_t nb_surfaces_used;
     DXGI_FORMAT format;
 
     ID3D11Texture2D *staging_texture;
@@ -112,6 +112,8 @@  static void d3d11va_frames_uninit(AVHWFramesContext *ctx)
     if (s->staging_texture)
         ID3D11Texture2D_Release(s->staging_texture);
     s->staging_texture = NULL;
+
+    av_freep(&frames_hwctx->texture_infos);
 }
 
 static int d3d11va_frames_get_constraints(AVHWDeviceContext *ctx,
@@ -152,8 +154,10 @@  static void free_texture(void *opaque, uint8_t *data)
     av_free(data);
 }
 
-static AVBufferRef *wrap_texture_buf(ID3D11Texture2D *tex, int index)
+static AVBufferRef *wrap_texture_buf(AVHWFramesContext *ctx, ID3D11Texture2D *tex, int index)
 {
+    D3D11VAFramesContext              *s = ctx->internal->priv;
+    AVD3D11VAFramesContext *frames_hwctx = ctx->hwctx;
     AVBufferRef *buf;
     AVD3D11FrameDescriptor *desc = av_mallocz(sizeof(*desc));
     if (!desc) {
@@ -161,6 +165,10 @@  static AVBufferRef *wrap_texture_buf(ID3D11Texture2D *tex, int index)
         return NULL;
     }
 
+    frames_hwctx->texture_infos[s->nb_surfaces_used].texture = tex;
+    frames_hwctx->texture_infos[s->nb_surfaces_used].index = index;
+    s->nb_surfaces_used++;
+
     desc->texture = tex;
     desc->index   = index;
 
@@ -199,7 +207,7 @@  static AVBufferRef *d3d11va_alloc_single(AVHWFramesContext *ctx)
         return NULL;
     }
 
-    return wrap_texture_buf(tex, 0);
+    return wrap_texture_buf(ctx, tex, 0);
 }
 
 static AVBufferRef *d3d11va_pool_alloc(void *opaque, int size)
@@ -220,7 +228,7 @@  static AVBufferRef *d3d11va_pool_alloc(void *opaque, int size)
     }
 
     ID3D11Texture2D_AddRef(hwctx->texture);
-    return wrap_texture_buf(hwctx->texture, s->nb_surfaces_used++);
+    return wrap_texture_buf(ctx, hwctx->texture, s->nb_surfaces_used);
 }
 
 static int d3d11va_frames_init(AVHWFramesContext *ctx)
@@ -267,7 +275,7 @@  static int d3d11va_frames_init(AVHWFramesContext *ctx)
             av_log(ctx, AV_LOG_ERROR, "User-provided texture has mismatching parameters\n");
             return AVERROR(EINVAL);
         }
-    } else if (texDesc.ArraySize > 0) {
+    } else if (!(texDesc.BindFlags & D3D11_BIND_RENDER_TARGET) && texDesc.ArraySize > 0) {
         hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &hwctx->texture);
         if (FAILED(hr)) {
             av_log(ctx, AV_LOG_ERROR, "Could not create the texture (%lx)\n", (long)hr);
@@ -275,6 +283,12 @@  static int d3d11va_frames_init(AVHWFramesContext *ctx)
         }
     }
 
+    hwctx->texture_infos = av_mallocz_array(ctx->initial_pool_size, sizeof(*hwctx->texture_infos));
+    if (!hwctx->texture_infos)
+        return AVERROR(ENOMEM);
+
+    s->nb_surfaces = ctx->initial_pool_size;
+
     ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(AVD3D11FrameDescriptor),
                                                         ctx, d3d11va_pool_alloc, NULL);
     if (!ctx->internal->pool_internal)
diff --git a/libavutil/hwcontext_d3d11va.h b/libavutil/hwcontext_d3d11va.h
index 9f91e9b1b6..295bdcd90d 100644
--- a/libavutil/hwcontext_d3d11va.h
+++ b/libavutil/hwcontext_d3d11va.h
@@ -164,6 +164,15 @@  typedef struct AVD3D11VAFramesContext {
      * This field is ignored/invalid if a user-allocated texture is provided.
      */
     UINT MiscFlags;
+
+    /**
+     * If the texture member above is not NULL, every element of this array
+     * holds that same texture pointer, each with a distinct index into the
+     * array texture. If the texture member above is NULL, each element holds
+     * a pointer to a separate non-array texture and an index of 0.
+     * This field is ignored/invalid if a user-allocated texture is provided.
+     */
+    AVD3D11FrameDescriptor *texture_infos;
 } AVD3D11VAFramesContext;
 
 #endif /* AVUTIL_HWCONTEXT_D3D11VA_H */