
[FFmpeg-devel,4/4] qsv: Merge libav implementation

Message ID c1897d8c-6063-411b-78e2-c69e8cb28f48@jkqxz.net
State Superseded

Commit Message

Mark Thompson Oct. 26, 2016, 7:50 p.m. UTC
Merged as-at libav 398f015, and therefore includes outstanding
skipped merges 04b17ff and 130e1f1.

All features not in libav are preserved, and no options change.
---
v3.

Changes from v2:
- Profile seems to be required by the decoder in some cases; always pass in the correct value (see the mapping sketch after this list).
- Fix parsing for MPEG-2; now works with streams which are not exactly macroblock-aligned.
- Fix H.265 parsing when the decoder is enabled.
- Add H.265 HW decode plugin ID for MSS 2017.
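For reference, a condensed, self-contained illustration of the profile mapping behind the first item above: the new ff_qsv_profile_to_mfx() helper (added in libavcodec/qsv.c below) passes H.264/HEVC profile values through unchanged and rescales the VC-1 and MPEG-2 values to the MediaSDK numbering. This is not part of the patch; the FF_PROFILE_* values are copied from avcodec.h only so the snippet compiles on its own.

/* Standalone sketch of the mapping performed by ff_qsv_profile_to_mfx(). */
#include <stdio.h>

#define FF_PROFILE_MPEG2_MAIN    4
#define FF_PROFILE_VC1_ADVANCED  3
#define FF_PROFILE_H264_HIGH   100

static int mpeg2_to_mfx(int p) { return 0x10 * p; }  /* MAIN (4)     -> 0x40 */
static int vc1_to_mfx(int p)   { return 4 * p + 1; } /* ADVANCED (3) -> 13   */
static int h26x_to_mfx(int p)  { return p; }         /* passed through as-is */

int main(void)
{
    printf("MPEG-2 MAIN   -> 0x%02x\n", mpeg2_to_mfx(FF_PROFILE_MPEG2_MAIN));
    printf("VC-1 ADVANCED -> %d\n",     vc1_to_mfx(FF_PROFILE_VC1_ADVANCED));
    printf("H.264 HIGH    -> %d\n",     h26x_to_mfx(FF_PROFILE_H264_HIGH));
    return 0;
}

The decoder then always fills param.mfx.CodecProfile with the mapped value at init time rather than leaving it for MFXVideoDECODE_DecodeHeader() to discover.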


 libavcodec/qsv.c          | 333 ++++++++++++++++-----------
 libavcodec/qsv_internal.h |  38 ++--
 libavcodec/qsvdec.c       | 569 +++++++++++++++++++---------------------------
 libavcodec/qsvdec.h       |  30 +--
 libavcodec/qsvdec_h2645.c |  29 ++-
 libavcodec/qsvdec_mpeg2.c |  85 ++++++-
 libavcodec/qsvdec_vc1.c   |  89 +++++++-
 libavcodec/qsvenc.c       | 158 +++++++------
 libavcodec/qsvenc.h       |   6 +-
 libavcodec/qsvenc_h264.c  |   3 +-
 10 files changed, 744 insertions(+), 596 deletions(-)

Comments

Hendrik Leppkes Oct. 31, 2016, 1:50 p.m. UTC | #1
On Wed, Oct 26, 2016 at 9:50 PM, Mark Thompson <sw@jkqxz.net> wrote:
> Merged as-at libav 398f015, and therefore includes outstanding
> skipped merges 04b17ff and 130e1f1.
>
> All features not in libav are preserved, and no options change.
> ---

LGTM, this should make further work on this much easier and bring it
back to a point where it's actually stable - and integrates with the
existing HW landscape (hwcontext et al).

- Hendrik
Mark Thompson Oct. 31, 2016, 7:45 p.m. UTC | #2
On 31/10/16 13:50, Hendrik Leppkes wrote:
> On Wed, Oct 26, 2016 at 9:50 PM, Mark Thompson <sw@jkqxz.net> wrote:
>> Merged as-at libav 398f015, and therefore includes outstanding
>> skipped merges 04b17ff and 130e1f1.
>>
>> All features not in libav are preserved, and no options change.
>> ---
> 
> LGTM, this should make further work on this much easier and bring it
> back to a point where it's actually stable - and integrates with the
> existing HW landscape (hwcontext et al).

Applied.

Thank you to everyone who assisted with testing and reviewing this series.

- Mark
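
The hwcontext integration mentioned above means an API user can now hand the decoder an AVHWFramesContext from the get_format callback rather than going through AVQSVContext. Below is a minimal sketch of such a callback, not taken from the patch: it assumes a QSV device reference hw_device_ref created earlier with av_hwdevice_ctx_create(), and the 32-surface pool size is illustrative.

#include <libavcodec/avcodec.h>
#include <libavutil/hwcontext.h>
#include <libavutil/hwcontext_qsv.h>

static AVBufferRef *hw_device_ref; /* assumed: created with av_hwdevice_ctx_create() */

static enum AVPixelFormat get_qsv_format(AVCodecContext *avctx,
                                         const enum AVPixelFormat *fmts)
{
    AVHWFramesContext  *frames_ctx;
    AVQSVFramesContext *frames_hwctx;

    /* pick AV_PIX_FMT_QSV if the decoder offers it */
    while (*fmts != AV_PIX_FMT_NONE && *fmts != AV_PIX_FMT_QSV)
        fmts++;
    if (*fmts == AV_PIX_FMT_NONE)
        return AV_PIX_FMT_NONE;

    avctx->hw_frames_ctx = av_hwframe_ctx_alloc(hw_device_ref);
    if (!avctx->hw_frames_ctx)
        return AV_PIX_FMT_NONE;

    frames_ctx   = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
    frames_hwctx = frames_ctx->hwctx;

    frames_ctx->format            = AV_PIX_FMT_QSV;
    frames_ctx->sw_format         = avctx->sw_pix_fmt;
    frames_ctx->width             = FFALIGN(avctx->coded_width,  32);
    frames_ctx->height            = FFALIGN(avctx->coded_height, 32);
    frames_ctx->initial_pool_size = 32; /* illustrative pool size */

    frames_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;

    if (av_hwframe_ctx_init(avctx->hw_frames_ctx) < 0) {
        av_buffer_unref(&avctx->hw_frames_ctx);
        return AV_PIX_FMT_NONE;
    }
    return AV_PIX_FMT_QSV;
}

The callback is installed with avctx->get_format = get_qsv_format before avcodec_open2(); decoded output then arrives as AV_PIX_FMT_QSV frames backed by the supplied surface pool.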

Patch

diff --git a/libavcodec/qsv.c b/libavcodec/qsv.c
index 11d453d..efd7cea 100644
--- a/libavcodec/qsv.c
+++ b/libavcodec/qsv.c
@@ -25,7 +25,10 @@ 
 #include <string.h>

 #include "libavutil/avstring.h"
+#include "libavutil/common.h"
 #include "libavutil/error.h"
+#include "libavutil/hwcontext.h"
+#include "libavutil/hwcontext_qsv.h"

 #include "avcodec.h"
 #include "qsv_internal.h"
@@ -51,6 +54,22 @@  int ff_qsv_codec_id_to_mfx(enum AVCodecID codec_id)
     return AVERROR(ENOSYS);
 }

+int ff_qsv_profile_to_mfx(enum AVCodecID codec_id, int profile)
+{
+    if (profile == FF_PROFILE_UNKNOWN)
+        return MFX_PROFILE_UNKNOWN;
+    switch (codec_id) {
+    case AV_CODEC_ID_H264:
+    case AV_CODEC_ID_HEVC:
+        return profile;
+    case AV_CODEC_ID_VC1:
+        return 4 * profile + 1;
+    case AV_CODEC_ID_MPEG2VIDEO:
+        return 0x10 * profile;
+    }
+    return MFX_PROFILE_UNKNOWN;
+}
+
 int ff_qsv_error(int mfx_err)
 {
     switch (mfx_err) {
@@ -85,90 +104,58 @@  int ff_qsv_error(int mfx_err)
         return AVERROR_UNKNOWN;
     }
 }
-static int ff_qsv_set_display_handle(AVCodecContext *avctx, QSVSession *qs)
+
+static int qsv_load_plugins(mfxSession session, const char *load_plugins,
+                            void *logctx)
 {
-    // this code is only required for Linux.  It searches for a valid
-    // display handle.  First in /dev/dri/renderD then in /dev/dri/card
-#ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
-    // VAAPI display handle
-    int ret = 0;
-    VADisplay va_dpy = NULL;
-    VAStatus va_res = VA_STATUS_SUCCESS;
-    int major_version = 0, minor_version = 0;
-    int fd = -1;
-    char adapterpath[256];
-    int adapter_num;
-
-    qs->fd_display = -1;
-    qs->va_display = NULL;
-
-    //search for valid graphics device
-    for (adapter_num = 0;adapter_num < 6;adapter_num++) {
-
-        if (adapter_num<3) {
-            snprintf(adapterpath,sizeof(adapterpath),
-                "/dev/dri/renderD%d", adapter_num+128);
-        } else {
-            snprintf(adapterpath,sizeof(adapterpath),
-                "/dev/dri/card%d", adapter_num-3);
-        }
+    if (!load_plugins || !*load_plugins)
+        return 0;

-        fd = open(adapterpath, O_RDWR);
-        if (fd < 0) {
-            av_log(avctx, AV_LOG_ERROR,
-                "mfx init: %s fd open failed\n", adapterpath);
-            continue;
-        }
+    while (*load_plugins) {
+        mfxPluginUID uid;
+        mfxStatus ret;
+        int i, err = 0;

-        va_dpy = vaGetDisplayDRM(fd);
-        if (!va_dpy) {
-            av_log(avctx, AV_LOG_ERROR,
-                "mfx init: %s vaGetDisplayDRM failed\n", adapterpath);
-            close(fd);
-            continue;
+        char *plugin = av_get_token(&load_plugins, ":");
+        if (!plugin)
+            return AVERROR(ENOMEM);
+        if (strlen(plugin) != 2 * sizeof(uid.Data)) {
+            av_log(logctx, AV_LOG_ERROR, "Invalid plugin UID length\n");
+            err = AVERROR(EINVAL);
+            goto load_plugin_fail;
         }

-        va_res = vaInitialize(va_dpy, &major_version, &minor_version);
-        if (VA_STATUS_SUCCESS != va_res) {
-            av_log(avctx, AV_LOG_ERROR,
-                "mfx init: %s vaInitialize failed\n", adapterpath);
-            close(fd);
-            fd = -1;
-            continue;
-        } else {
-            av_log(avctx, AV_LOG_VERBOSE,
-            "mfx initialization: %s vaInitialize successful\n",adapterpath);
-            qs->fd_display = fd;
-            qs->va_display = va_dpy;
-            ret = MFXVideoCORE_SetHandle(qs->session,
-                  (mfxHandleType)MFX_HANDLE_VA_DISPLAY, (mfxHDL)va_dpy);
-            if (ret < 0) {
-                av_log(avctx, AV_LOG_ERROR,
-                "Error %d during set display handle\n", ret);
-                return ff_qsv_error(ret);
+        for (i = 0; i < sizeof(uid.Data); i++) {
+            err = sscanf(plugin + 2 * i, "%2hhx", uid.Data + i);
+            if (err != 1) {
+                av_log(logctx, AV_LOG_ERROR, "Invalid plugin UID\n");
+                err = AVERROR(EINVAL);
+                goto load_plugin_fail;
             }
-            break;
+
+        }
+
+        ret = MFXVideoUSER_Load(session, &uid, 1);
+        if (ret < 0) {
+            av_log(logctx, AV_LOG_ERROR, "Could not load the requested plugin: %s\n",
+                   plugin);
+            err = ff_qsv_error(ret);
+            goto load_plugin_fail;
         }
+
+        if (*load_plugins)
+            load_plugins++;
+load_plugin_fail:
+        av_freep(&plugin);
+        if (err < 0)
+            return err;
     }
-#endif //AVCODEC_QSV_LINUX_SESSION_HANDLE
+
     return 0;
+
 }
-/**
- * @brief Initialize a MSDK session
- *
- * Media SDK is based on sessions, so this is the prerequisite
- * initialization for HW acceleration.  For Windows the session is
- * complete and ready to use, for Linux a display handle is
- * required.  For releases of Media Server Studio >= 2015 R4 the
- * render nodes interface is preferred (/dev/dri/renderD).
- * Using Media Server Studio 2015 R4 or newer is recommended
- * but the older /dev/dri/card interface is also searched
- * for broader compatibility.
- *
- * @param avctx    ffmpeg metadata for this codec context
- * @param session  the MSDK session used
- */
-int ff_qsv_init_internal_session(AVCodecContext *avctx, QSVSession *qs,
+
+int ff_qsv_init_internal_session(AVCodecContext *avctx, mfxSession *session,
                                  const char *load_plugins)
 {
     mfxIMPL impl   = MFX_IMPL_AUTO_ANY;
@@ -177,58 +164,19 @@  int ff_qsv_init_internal_session(AVCodecContext *avctx, QSVSession *qs,
     const char *desc;
     int ret;

-    ret = MFXInit(impl, &ver, &qs->session);
+    ret = MFXInit(impl, &ver, session);
     if (ret < 0) {
         av_log(avctx, AV_LOG_ERROR, "Error initializing an internal MFX session\n");
         return ff_qsv_error(ret);
     }

-    ret = ff_qsv_set_display_handle(avctx, qs);
-    if (ret < 0)
+    ret = qsv_load_plugins(*session, load_plugins, avctx);
+    if (ret < 0) {
+        av_log(avctx, AV_LOG_ERROR, "Error loading plugins\n");
         return ret;
-
-    if (load_plugins && *load_plugins) {
-        while (*load_plugins) {
-            mfxPluginUID uid;
-            int i, err = 0;
-
-            char *plugin = av_get_token(&load_plugins, ":");
-            if (!plugin)
-                return AVERROR(ENOMEM);
-            if (strlen(plugin) != 2 * sizeof(uid.Data)) {
-                av_log(avctx, AV_LOG_ERROR, "Invalid plugin UID length\n");
-                err = AVERROR(EINVAL);
-                goto load_plugin_fail;
-            }
-
-            for (i = 0; i < sizeof(uid.Data); i++) {
-                err = sscanf(plugin + 2 * i, "%2hhx", uid.Data + i);
-                if (err != 1) {
-                    av_log(avctx, AV_LOG_ERROR, "Invalid plugin UID\n");
-                    err = AVERROR(EINVAL);
-                    goto load_plugin_fail;
-                }
-
-            }
-
-            ret = MFXVideoUSER_Load(qs->session, &uid, 1);
-            if (ret < 0) {
-                av_log(avctx, AV_LOG_ERROR, "Could not load the requested plugin: %s\n",
-                       plugin);
-                err = ff_qsv_error(ret);
-                goto load_plugin_fail;
-            }
-
-            if (*load_plugins)
-                load_plugins++;
-load_plugin_fail:
-            av_freep(&plugin);
-            if (err < 0)
-                return err;
-        }
     }

-    MFXQueryIMPL(qs->session, &impl);
+    MFXQueryIMPL(*session, &impl);

     switch (MFX_IMPL_BASETYPE(impl)) {
     case MFX_IMPL_SOFTWARE:
@@ -251,21 +199,146 @@  load_plugin_fail:
     return 0;
 }

-int ff_qsv_close_internal_session(QSVSession *qs)
+static mfxStatus qsv_frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
+                                 mfxFrameAllocResponse *resp)
 {
-    if (qs->session) {
-        MFXClose(qs->session);
-        qs->session = NULL;
+    QSVFramesContext *ctx = pthis;
+    mfxFrameInfo      *i  = &req->Info;
+    mfxFrameInfo      *i1 = &ctx->info;
+
+    if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET) ||
+        !(req->Type & (MFX_MEMTYPE_FROM_DECODE | MFX_MEMTYPE_FROM_ENCODE)) ||
+        !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
+        return MFX_ERR_UNSUPPORTED;
+    if (i->Width  != i1->Width || i->Height != i1->Height ||
+        i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
+        av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
+               "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
+               i->Width,  i->Height,  i->FourCC,  i->ChromaFormat,
+               i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
+        return MFX_ERR_UNSUPPORTED;
+    }
+
+    resp->mids           = ctx->mids;
+    resp->NumFrameActual = ctx->nb_mids;
+
+    return MFX_ERR_NONE;
+}
+
+static mfxStatus qsv_frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
+{
+    return MFX_ERR_NONE;
+}
+
+static mfxStatus qsv_frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
+{
+    return MFX_ERR_UNSUPPORTED;
+}
+
+static mfxStatus qsv_frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
+{
+    return MFX_ERR_UNSUPPORTED;
+}
+
+static mfxStatus qsv_frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
+{
+    *hdl = mid;
+    return MFX_ERR_NONE;
+}
+
+int ff_qsv_init_session_hwcontext(AVCodecContext *avctx, mfxSession *psession,
+                                  QSVFramesContext *qsv_frames_ctx,
+                                  const char *load_plugins, int opaque)
+{
+    static const mfxHandleType handle_types[] = {
+        MFX_HANDLE_VA_DISPLAY,
+        MFX_HANDLE_D3D9_DEVICE_MANAGER,
+        MFX_HANDLE_D3D11_DEVICE,
+    };
+    mfxFrameAllocator frame_allocator = {
+        .pthis  = qsv_frames_ctx,
+        .Alloc  = qsv_frame_alloc,
+        .Lock   = qsv_frame_lock,
+        .Unlock = qsv_frame_unlock,
+        .GetHDL = qsv_frame_get_hdl,
+        .Free   = qsv_frame_free,
+    };
+
+    AVHWFramesContext    *frames_ctx = (AVHWFramesContext*)qsv_frames_ctx->hw_frames_ctx->data;
+    AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
+    AVQSVDeviceContext *device_hwctx = frames_ctx->device_ctx->hwctx;
+    mfxSession        parent_session = device_hwctx->session;
+
+    mfxSession    session;
+    mfxVersion    ver;
+    mfxIMPL       impl;
+    mfxHDL        handle = NULL;
+    mfxHandleType handle_type;
+    mfxStatus err;
+
+    int i, ret;
+
+    err = MFXQueryIMPL(parent_session, &impl);
+    if (err == MFX_ERR_NONE)
+        err = MFXQueryVersion(parent_session, &ver);
+    if (err != MFX_ERR_NONE) {
+        av_log(avctx, AV_LOG_ERROR, "Error querying the session attributes\n");
+        return ff_qsv_error(err);
     }
-#ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
-    if (qs->va_display) {
-        vaTerminate(qs->va_display);
-        qs->va_display = NULL;
+
+    for (i = 0; i < FF_ARRAY_ELEMS(handle_types); i++) {
+        err = MFXVideoCORE_GetHandle(parent_session, handle_types[i], &handle);
+        if (err == MFX_ERR_NONE) {
+            handle_type = handle_types[i];
+            break;
+        }
+        handle = NULL;
     }
-    if (qs->fd_display > 0) {
-        close(qs->fd_display);
-        qs->fd_display = -1;
+    if (!handle) {
+        av_log(avctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
+               "from the session\n");
     }
-#endif
+
+    err = MFXInit(impl, &ver, &session);
+    if (err != MFX_ERR_NONE) {
+        av_log(avctx, AV_LOG_ERROR,
+               "Error initializing a child MFX session: %d\n", err);
+        return ff_qsv_error(err);
+    }
+
+    if (handle) {
+        err = MFXVideoCORE_SetHandle(session, handle_type, handle);
+        if (err != MFX_ERR_NONE) {
+            av_log(avctx, AV_LOG_ERROR, "Error setting a HW handle: %d\n", err);
+            return ff_qsv_error(err);
+        }
+    }
+
+    ret = qsv_load_plugins(session, load_plugins, avctx);
+    if (ret < 0) {
+        av_log(avctx, AV_LOG_ERROR, "Error loading plugins\n");
+        return ret;
+    }
+
+    if (!opaque) {
+        av_freep(&qsv_frames_ctx->mids);
+        qsv_frames_ctx->mids = av_mallocz_array(frames_hwctx->nb_surfaces,
+                                                sizeof(*qsv_frames_ctx->mids));
+        if (!qsv_frames_ctx->mids)
+            return AVERROR(ENOMEM);
+
+        qsv_frames_ctx->info    = frames_hwctx->surfaces[0].Info;
+        qsv_frames_ctx->nb_mids = frames_hwctx->nb_surfaces;
+        for (i = 0; i < frames_hwctx->nb_surfaces; i++)
+            qsv_frames_ctx->mids[i] = frames_hwctx->surfaces[i].Data.MemId;
+
+        err = MFXVideoCORE_SetFrameAllocator(session, &frame_allocator);
+        if (err != MFX_ERR_NONE) {
+            av_log(avctx, AV_LOG_ERROR, "Error setting a frame allocator: %d\n", err);
+            return ff_qsv_error(err);
+        }
+    }
+
+    *psession = session;
     return 0;
 }
diff --git a/libavcodec/qsv_internal.h b/libavcodec/qsv_internal.h
index f289a2b..adb4cbc 100644
--- a/libavcodec/qsv_internal.h
+++ b/libavcodec/qsv_internal.h
@@ -21,21 +21,6 @@ 
 #ifndef AVCODEC_QSV_INTERNAL_H
 #define AVCODEC_QSV_INTERNAL_H

-#if CONFIG_VAAPI
-#define AVCODEC_QSV_LINUX_SESSION_HANDLE
-#endif //CONFIG_VAAPI
-
-#ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
-#include <stdio.h>
-#include <string.h>
-#if HAVE_UNISTD_H
-#include <unistd.h>
-#endif
-#include <fcntl.h>
-#include <va/va.h>
-#include <va/va_drm.h>
-#endif
-
 #include <mfx/mfxvideo.h>

 #include "libavutil/frame.h"
@@ -43,7 +28,7 @@ 
 #include "avcodec.h"

 #define QSV_VERSION_MAJOR 1
-#define QSV_VERSION_MINOR 9
+#define QSV_VERSION_MINOR 1

 #define ASYNC_DEPTH_DEFAULT 4       // internal parallelism

@@ -65,13 +50,12 @@  typedef struct QSVFrame {
     struct QSVFrame *next;
 } QSVFrame;

-typedef struct QSVSession {
-    mfxSession session;
-#ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
-    int        fd_display;
-    VADisplay  va_display;
-#endif
-} QSVSession;
+typedef struct QSVFramesContext {
+    AVBufferRef *hw_frames_ctx;
+    mfxFrameInfo info;
+    mfxMemId *mids;
+    int    nb_mids;
+} QSVFramesContext;

 /**
  * Convert a libmfx error code into a ffmpeg error code.
@@ -79,9 +63,13 @@  typedef struct QSVSession {
 int ff_qsv_error(int mfx_err);

 int ff_qsv_codec_id_to_mfx(enum AVCodecID codec_id);
+int ff_qsv_profile_to_mfx(enum AVCodecID codec_id, int profile);

-int ff_qsv_init_internal_session(AVCodecContext *avctx, QSVSession *qs,
+int ff_qsv_init_internal_session(AVCodecContext *avctx, mfxSession *session,
                                  const char *load_plugins);
-int ff_qsv_close_internal_session(QSVSession *qs);
+
+int ff_qsv_init_session_hwcontext(AVCodecContext *avctx, mfxSession *session,
+                                  QSVFramesContext *qsv_frames_ctx,
+                                  const char *load_plugins, int opaque);

 #endif /* AVCODEC_QSV_INTERNAL_H */
diff --git a/libavcodec/qsvdec.c b/libavcodec/qsvdec.c
index 6409312..e25dc73 100644
--- a/libavcodec/qsvdec.c
+++ b/libavcodec/qsvdec.c
@@ -27,6 +27,8 @@ 
 #include <mfx/mfxvideo.h>

 #include "libavutil/common.h"
+#include "libavutil/hwcontext.h"
+#include "libavutil/hwcontext_qsv.h"
 #include "libavutil/mem.h"
 #include "libavutil/log.h"
 #include "libavutil/pixfmt.h"
@@ -49,88 +51,128 @@  int ff_qsv_map_pixfmt(enum AVPixelFormat format)
     }
 }

-static int qsv_init_session(AVCodecContext *avctx, QSVContext *q, mfxSession session)
+static int qsv_init_session(AVCodecContext *avctx, QSVContext *q, mfxSession session,
+                            AVBufferRef *hw_frames_ref)
 {
-    if (!session) {
-        if (!q->internal_qs.session) {
-           int ret = ff_qsv_init_internal_session(avctx, &q->internal_qs,
-                                                  q->load_plugins);
+    int ret;
+
+    if (session) {
+        q->session = session;
+    } else if (hw_frames_ref) {
+        if (q->internal_session) {
+            MFXClose(q->internal_session);
+            q->internal_session = NULL;
+        }
+        av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
+
+        q->frames_ctx.hw_frames_ctx = av_buffer_ref(hw_frames_ref);
+        if (!q->frames_ctx.hw_frames_ctx)
+            return AVERROR(ENOMEM);
+
+        ret = ff_qsv_init_session_hwcontext(avctx, &q->internal_session,
+                                            &q->frames_ctx, q->load_plugins,
+                                            q->iopattern == MFX_IOPATTERN_OUT_OPAQUE_MEMORY);
+        if (ret < 0) {
+            av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
+            return ret;
+        }
+
+        q->session = q->internal_session;
+    } else {
+        if (!q->internal_session) {
+            ret = ff_qsv_init_internal_session(avctx, &q->internal_session,
+                                               q->load_plugins);
             if (ret < 0)
                 return ret;
         }

-        q->session = q->internal_qs.session;
-    } else {
-        q->session = session;
+        q->session = q->internal_session;
     }

-   return 0;
+    /* make sure the decoder is uninitialized */
+    MFXVideoDECODE_Close(q->session);
+
+    return 0;
 }

-static int qsv_decode_init(AVCodecContext *avctx, QSVContext *q, AVPacket *avpkt)
+static int qsv_decode_init(AVCodecContext *avctx, QSVContext *q)
 {
     mfxSession session = NULL;
+    int iopattern = 0;
     mfxVideoParam param = { { 0 } };
-    mfxBitstream bs   = { { { 0 } } };
     int ret;
-    enum AVPixelFormat pix_fmts[3] = { AV_PIX_FMT_QSV,
-                                       AV_PIX_FMT_NV12,
-                                       AV_PIX_FMT_NONE };

-    ret = ff_get_format(avctx, pix_fmts);
-    if (ret < 0)
-        return ret;
-
-    avctx->pix_fmt      = ret;
+    if (!q->async_fifo) {
+        q->async_fifo = av_fifo_alloc((1 + q->async_depth) *
+                                      (sizeof(mfxSyncPoint*) + sizeof(QSVFrame*)));
+        if (!q->async_fifo)
+            return AVERROR(ENOMEM);
+    }

-    q->iopattern  = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
     if (avctx->hwaccel_context) {
-        AVQSVContext *qsv = avctx->hwaccel_context;
+        AVQSVContext *user_ctx = avctx->hwaccel_context;
+        session           = user_ctx->session;
+        iopattern         = user_ctx->iopattern;
+        q->ext_buffers    = user_ctx->ext_buffers;
+        q->nb_ext_buffers = user_ctx->nb_ext_buffers;
+    }

-        session           = qsv->session;
-        q->iopattern      = qsv->iopattern;
-        q->ext_buffers    = qsv->ext_buffers;
-        q->nb_ext_buffers = qsv->nb_ext_buffers;
+    if (avctx->hw_frames_ctx) {
+        AVHWFramesContext    *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
+        AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
+
+        if (!iopattern) {
+            if (frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)
+                iopattern = MFX_IOPATTERN_OUT_OPAQUE_MEMORY;
+            else if (frames_hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
+                iopattern = MFX_IOPATTERN_OUT_VIDEO_MEMORY;
+        }
     }

-    ret = qsv_init_session(avctx, q, session);
+    if (!iopattern)
+        iopattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
+    q->iopattern = iopattern;
+
+    ret = qsv_init_session(avctx, q, session, avctx->hw_frames_ctx);
     if (ret < 0) {
         av_log(avctx, AV_LOG_ERROR, "Error initializing an MFX session\n");
         return ret;
     }

-    if (avpkt->size) {
-        bs.Data       = avpkt->data;
-        bs.DataLength = avpkt->size;
-        bs.MaxLength  = bs.DataLength;
-        bs.TimeStamp  = avpkt->pts;
-    } else
-        return AVERROR_INVALIDDATA;
-
     ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
-    if (ret < 0) {
-        av_log(avctx, AV_LOG_ERROR, "Unsupported codec_id %08x\n", avctx->codec_id);
+    if (ret < 0)
         return ret;
-    }
+    param.mfx.CodecId      = ret;
+    param.mfx.CodecProfile = ff_qsv_profile_to_mfx(avctx->codec_id, avctx->profile);
+    param.mfx.CodecLevel   = MFX_LEVEL_UNKNOWN;

-    param.mfx.CodecId = ret;
-
-    ret = MFXVideoDECODE_DecodeHeader(q->session, &bs, &param);
-    if (MFX_ERR_MORE_DATA==ret) {
-        /* this code means that header not found so we return packet size to skip
-           a current packet
-         */
-        return avpkt->size;
-    } else if (ret < 0) {
-        av_log(avctx, AV_LOG_ERROR, "Decode header error %d\n", ret);
-        return ff_qsv_error(ret);
+    param.mfx.FrameInfo.BitDepthLuma   = 8;
+    param.mfx.FrameInfo.BitDepthChroma = 8;
+    param.mfx.FrameInfo.Shift          = 0;
+    param.mfx.FrameInfo.FourCC         = MFX_FOURCC_NV12;
+    param.mfx.FrameInfo.Width          = avctx->coded_width;
+    param.mfx.FrameInfo.Height         = avctx->coded_height;
+    param.mfx.FrameInfo.ChromaFormat   = MFX_CHROMAFORMAT_YUV420;
+
+    switch (avctx->field_order) {
+    case AV_FIELD_PROGRESSIVE:
+        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
+        break;
+    case AV_FIELD_TT:
+        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_TFF;
+        break;
+    case AV_FIELD_BB:
+        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_BFF;
+        break;
+    default:
+        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_UNKNOWN;
+        break;
     }
+
     param.IOPattern   = q->iopattern;
     param.AsyncDepth  = q->async_depth;
     param.ExtParam    = q->ext_buffers;
     param.NumExtParam = q->nb_ext_buffers;
-    param.mfx.FrameInfo.BitDepthLuma   = 8;
-    param.mfx.FrameInfo.BitDepthChroma = 8;

     ret = MFXVideoDECODE_Init(q->session, &param);
     if (ret < 0) {
@@ -144,37 +186,6 @@  static int qsv_decode_init(AVCodecContext *avctx, QSVContext *q, AVPacket *avpkt
         return ff_qsv_error(ret);
     }

-    avctx->profile      = param.mfx.CodecProfile;
-    avctx->level        = param.mfx.CodecLevel;
-    avctx->coded_width  = param.mfx.FrameInfo.Width;
-    avctx->coded_height = param.mfx.FrameInfo.Height;
-    avctx->width        = param.mfx.FrameInfo.CropW - param.mfx.FrameInfo.CropX;
-    avctx->height       = param.mfx.FrameInfo.CropH - param.mfx.FrameInfo.CropY;
-
-    /* maximum decoder latency should be not exceed max DPB size for h.264 and
-       HEVC which is 16 for both cases.
-       So weare  pre-allocating fifo big enough for 17 elements:
-     */
-    if (!q->async_fifo) {
-        q->async_fifo = av_fifo_alloc((1 + 16) *
-                                      (sizeof(mfxSyncPoint) + sizeof(QSVFrame*)));
-        if (!q->async_fifo)
-            return AVERROR(ENOMEM);
-    }
-
-    if (!q->input_fifo) {
-        q->input_fifo = av_fifo_alloc(1024*16);
-        if (!q->input_fifo)
-            return AVERROR(ENOMEM);
-    }
-
-    if (!q->pkt_fifo) {
-        q->pkt_fifo = av_fifo_alloc( sizeof(AVPacket) * (1 + 16) );
-        if (!q->pkt_fifo)
-            return AVERROR(ENOMEM);
-    }
-    q->engine_ready = 1;
-
     return 0;
 }

@@ -270,161 +281,77 @@  static QSVFrame *find_frame(QSVContext *q, mfxFrameSurface1 *surf)
     return NULL;
 }

-/*  This function uses for 'smart' releasing of consumed data
-    from the input bitstream fifo.
-    Since the input fifo mapped to mfxBitstream which does not understand
-    a wrapping of data over fifo end, we should also to relocate a possible
-    data rest to fifo begin. If rest of data is absent then we just reset fifo's
-    pointers to initial positions.
-    NOTE the case when fifo does contain unconsumed data is rare and typical
-    amount of such data is 1..4 bytes.
-*/
-static void qsv_fifo_relocate(AVFifoBuffer *f, int bytes_to_free)
-{
-    int data_size;
-    int data_rest = 0;
-
-    av_fifo_drain(f, bytes_to_free);
-
-    data_size = av_fifo_size(f);
-    if (data_size > 0) {
-        if (f->buffer!=f->rptr) {
-            if ( (f->end - f->rptr) < data_size) {
-                data_rest = data_size - (f->end - f->rptr);
-                data_size-=data_rest;
-                memmove(f->buffer+data_size, f->buffer, data_rest);
-            }
-            memmove(f->buffer, f->rptr, data_size);
-            data_size+= data_rest;
-        }
-    }
-    f->rptr = f->buffer;
-    f->wptr = f->buffer + data_size;
-    f->wndx = data_size;
-    f->rndx = 0;
-}
-
-
-static void close_decoder(QSVContext *q)
-{
-    QSVFrame *cur;
-
-    if (q->session)
-        MFXVideoDECODE_Close(q->session);
-
-    cur = q->work_frames;
-    while (cur) {
-        q->work_frames = cur->next;
-        av_frame_free(&cur->frame);
-        av_freep(&cur);
-        cur = q->work_frames;
-    }
-
-    q->engine_ready   = 0;
-    q->reinit_pending = 0;
-}
-
-static int do_qsv_decode(AVCodecContext *avctx, QSVContext *q,
-                  AVFrame *frame, int *got_frame,
-                  AVPacket *avpkt)
+static int qsv_decode(AVCodecContext *avctx, QSVContext *q,
+                      AVFrame *frame, int *got_frame,
+                      AVPacket *avpkt)
 {
     QSVFrame *out_frame;
     mfxFrameSurface1 *insurf;
     mfxFrameSurface1 *outsurf;
-    mfxSyncPoint sync;
+    mfxSyncPoint *sync;
     mfxBitstream bs = { { { 0 } } };
     int ret;
-    int n_out_frames;
-    int buffered = 0;
-    int flush    = !avpkt->size || q->reinit_pending;
-
-    if (!q->engine_ready) {
-        ret = qsv_decode_init(avctx, q, avpkt);
-        if (ret)
-            return ret;
-    }

-    if (!flush) {
-        if (av_fifo_size(q->input_fifo)) {
-            /* we have got rest of previous packet into buffer */
-            if (av_fifo_space(q->input_fifo) < avpkt->size) {
-                ret = av_fifo_grow(q->input_fifo, avpkt->size);
-                if (ret < 0)
-                    return ret;
-            }
-            av_fifo_generic_write(q->input_fifo, avpkt->data, avpkt->size, NULL);
-            bs.Data       = q->input_fifo->rptr;
-            bs.DataLength = av_fifo_size(q->input_fifo);
-            buffered = 1;
-        } else {
-            bs.Data       = avpkt->data;
-            bs.DataLength = avpkt->size;
-        }
+    if (avpkt->size) {
+        bs.Data       = avpkt->data;
+        bs.DataLength = avpkt->size;
         bs.MaxLength  = bs.DataLength;
         bs.TimeStamp  = avpkt->pts;
     }

-    while (1) {
+    sync = av_mallocz(sizeof(*sync));
+    if (!sync) {
+        av_freep(&sync);
+        return AVERROR(ENOMEM);
+    }
+
+    do {
         ret = get_surface(avctx, q, &insurf);
         if (ret < 0)
             return ret;
-        do {
-            ret = MFXVideoDECODE_DecodeFrameAsync(q->session, flush ? NULL : &bs,
-                                                  insurf, &outsurf, &sync);
-            if (ret != MFX_WRN_DEVICE_BUSY)
-                break;
-            av_usleep(500);
-        } while (1);
-
-        if (MFX_WRN_VIDEO_PARAM_CHANGED==ret) {
-            /* TODO: handle here minor sequence header changing */
-        } else if (MFX_ERR_INCOMPATIBLE_VIDEO_PARAM==ret) {
-            av_fifo_reset(q->input_fifo);
-            flush = q->reinit_pending = 1;
-            continue;
-        }
-
-        if (sync) {
-            QSVFrame *out_frame = find_frame(q, outsurf);

-            if (!out_frame) {
-                av_log(avctx, AV_LOG_ERROR,
-                       "The returned surface does not correspond to any frame\n");
-                return AVERROR_BUG;
-            }
+        ret = MFXVideoDECODE_DecodeFrameAsync(q->session, avpkt->size ? &bs : NULL,
+                                              insurf, &outsurf, sync);
+        if (ret == MFX_WRN_DEVICE_BUSY)
+            av_usleep(500);

-            out_frame->queued = 1;
-            av_fifo_generic_write(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
-            av_fifo_generic_write(q->async_fifo, &sync,      sizeof(sync),      NULL);
+    } while (ret == MFX_WRN_DEVICE_BUSY || ret == MFX_ERR_MORE_SURFACE);

-            continue;
-        }
-        if (MFX_ERR_MORE_SURFACE != ret && ret < 0)
-            break;
+    if (ret != MFX_ERR_NONE &&
+        ret != MFX_ERR_MORE_DATA &&
+        ret != MFX_WRN_VIDEO_PARAM_CHANGED &&
+        ret != MFX_ERR_MORE_SURFACE) {
+        av_log(avctx, AV_LOG_ERROR, "Error during QSV decoding.\n");
+        av_freep(&sync);
+        return ff_qsv_error(ret);
     }

     /* make sure we do not enter an infinite loop if the SDK
      * did not consume any data and did not return anything */
-    if (!sync && !bs.DataOffset && !flush) {
+    if (!*sync && !bs.DataOffset) {
         av_log(avctx, AV_LOG_WARNING, "A decode call did not consume any data\n");
         bs.DataOffset = avpkt->size;
     }

-    if (buffered) {
-        qsv_fifo_relocate(q->input_fifo, bs.DataOffset);
-    } else if (bs.DataOffset!=avpkt->size) {
-        /* some data of packet was not consumed. store it to local buffer */
-        av_fifo_generic_write(q->input_fifo, avpkt->data+bs.DataOffset,
-                              avpkt->size - bs.DataOffset, NULL);
-    }
+    if (*sync) {
+        QSVFrame *out_frame = find_frame(q, outsurf);

-    if (MFX_ERR_MORE_DATA!=ret && ret < 0) {
-        av_log(avctx, AV_LOG_ERROR, "Error %d during QSV decoding.\n", ret);
-        return ff_qsv_error(ret);
+        if (!out_frame) {
+            av_log(avctx, AV_LOG_ERROR,
+                   "The returned surface does not correspond to any frame\n");
+            av_freep(&sync);
+            return AVERROR_BUG;
+        }
+
+        out_frame->queued = 1;
+        av_fifo_generic_write(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
+        av_fifo_generic_write(q->async_fifo, &sync,      sizeof(sync),      NULL);
+    } else {
+        av_freep(&sync);
     }
-    n_out_frames = av_fifo_size(q->async_fifo) / (sizeof(out_frame)+sizeof(sync));

-    if (n_out_frames > q->async_depth || (flush && n_out_frames) ) {
+    if (!av_fifo_space(q->async_fifo) ||
+        (!avpkt->size && av_fifo_size(q->async_fifo))) {
         AVFrame *src_frame;

         av_fifo_generic_read(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
@@ -432,9 +359,11 @@  static int do_qsv_decode(AVCodecContext *avctx, QSVContext *q,
         out_frame->queued = 0;

         do {
-            ret = MFXVideoCORE_SyncOperation(q->session, sync, 1000);
+            ret = MFXVideoCORE_SyncOperation(q->session, *sync, 1000);
         } while (ret == MFX_WRN_IN_EXECUTION);

+        av_freep(&sync);
+
         src_frame = out_frame->frame;

         ret = av_frame_ref(frame, src_frame);
@@ -462,149 +391,125 @@  FF_ENABLE_DEPRECATION_WARNINGS
         *got_frame = 1;
     }

-    return avpkt->size;
+    return bs.DataOffset;
 }
-/*
- This function inserts a packet at fifo front.
-*/
-static void qsv_packet_push_front(QSVContext *q, AVPacket *avpkt)
-{
-    int fifo_size = av_fifo_size(q->pkt_fifo);
-    if (!fifo_size) {
-    /* easy case fifo is empty */
-        av_fifo_generic_write(q->pkt_fifo, avpkt, sizeof(*avpkt), NULL);
-    } else {
-    /* realloc necessary */
-        AVPacket pkt;
-        AVFifoBuffer *fifo = av_fifo_alloc(fifo_size+av_fifo_space(q->pkt_fifo));

-        av_fifo_generic_write(fifo, avpkt, sizeof(*avpkt), NULL);
-
-        while (av_fifo_size(q->pkt_fifo)) {
-            av_fifo_generic_read(q->pkt_fifo, &pkt, sizeof(pkt), NULL);
-            av_fifo_generic_write(fifo,       &pkt, sizeof(pkt), NULL);
-        }
-        av_fifo_free(q->pkt_fifo);
-        q->pkt_fifo = fifo;
-    }
-}
-int ff_qsv_decode(AVCodecContext *avctx, QSVContext *q,
-                  AVFrame *frame, int *got_frame,
-                  AVPacket *avpkt)
+int ff_qsv_decode_close(QSVContext *q)
 {
-    AVPacket pkt_ref = { 0 };
-    int ret = 0;
+    QSVFrame *cur = q->work_frames;

-    if (q->pkt_fifo && av_fifo_size(q->pkt_fifo) >= sizeof(AVPacket)) {
-        /* we already have got some buffered packets. so add new to tail */
-        ret = av_packet_ref(&pkt_ref, avpkt);
-        if (ret < 0)
-            return ret;
-        av_fifo_generic_write(q->pkt_fifo, &pkt_ref, sizeof(pkt_ref), NULL);
-    }
-    if (q->reinit_pending) {
-        ret = do_qsv_decode(avctx, q, frame, got_frame, avpkt);
+    if (q->session)
+        MFXVideoDECODE_Close(q->session);

-        if (!*got_frame) {
-            /* Flushing complete, no more frames  */
-            close_decoder(q);
-            //return ff_qsv_decode(avctx, q, frame, got_frame, avpkt);
-        }
-    }
-    if (!q->reinit_pending) {
-        if (q->pkt_fifo && av_fifo_size(q->pkt_fifo) >= sizeof(AVPacket)) {
-            /* process buffered packets */
-            while (!*got_frame && av_fifo_size(q->pkt_fifo) >= sizeof(AVPacket)) {
-                av_fifo_generic_read(q->pkt_fifo, &pkt_ref, sizeof(pkt_ref), NULL);
-                ret = do_qsv_decode(avctx, q, frame, got_frame, &pkt_ref);
-                if (q->reinit_pending) {
-                    /*
-                       A rare case: new reinit pending when buffering existing.
-                       We should to return the pkt_ref back to same place of fifo
-                    */
-                    qsv_packet_push_front(q, &pkt_ref);
-                } else {
-                    av_packet_unref(&pkt_ref);
-                }
-           }
-        } else {
-            /* general decoding */
-            ret = do_qsv_decode(avctx, q, frame, got_frame, avpkt);
-            if (q->reinit_pending) {
-                ret = av_packet_ref(&pkt_ref, avpkt);
-                if (ret < 0)
-                    return ret;
-                av_fifo_generic_write(q->pkt_fifo, &pkt_ref, sizeof(pkt_ref), NULL);
-            }
-        }
-    }
+    while (q->async_fifo && av_fifo_size(q->async_fifo)) {
+        QSVFrame *out_frame;
+        mfxSyncPoint *sync;

-    return ret;
-}
-/*
- This function resets decoder and corresponded buffers before seek operation
-*/
-void ff_qsv_decode_reset(AVCodecContext *avctx, QSVContext *q)
-{
-    QSVFrame *cur;
-    AVPacket pkt;
-    int ret = 0;
-    mfxVideoParam param = { { 0 } };
-
-    if (q->reinit_pending) {
-        close_decoder(q);
-    } else if (q->engine_ready) {
-        ret = MFXVideoDECODE_GetVideoParam(q->session, &param);
-        if (ret < 0) {
-            av_log(avctx, AV_LOG_ERROR, "MFX decode get param error %d\n", ret);
-        }
+        av_fifo_generic_read(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
+        av_fifo_generic_read(q->async_fifo, &sync,      sizeof(sync),      NULL);

-        ret = MFXVideoDECODE_Reset(q->session, &param);
-        if (ret < 0) {
-            av_log(avctx, AV_LOG_ERROR, "MFX decode reset error %d\n", ret);
-        }
+        av_freep(&sync);
+    }

-        /* Free all frames*/
+    while (cur) {
+        q->work_frames = cur->next;
+        av_frame_free(&cur->frame);
+        av_freep(&cur);
         cur = q->work_frames;
-        while (cur) {
-            q->work_frames = cur->next;
-            av_frame_free(&cur->frame);
-            av_freep(&cur);
-            cur = q->work_frames;
-        }
     }

-    /* Reset output surfaces */
-    if (q->async_fifo)
-        av_fifo_reset(q->async_fifo);
+    av_fifo_free(q->async_fifo);
+    q->async_fifo = NULL;
+
+    av_parser_close(q->parser);
+    avcodec_free_context(&q->avctx_internal);

-    /* Reset input packets fifo */
-    while (q->pkt_fifo && av_fifo_size(q->pkt_fifo)) {
-        av_fifo_generic_read(q->pkt_fifo, &pkt, sizeof(pkt), NULL);
-        av_packet_unref(&pkt);
-    }
+    if (q->internal_session)
+        MFXClose(q->internal_session);

-    /* Reset input bitstream fifo */
-    if (q->input_fifo)
-        av_fifo_reset(q->input_fifo);
+    av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
+    av_freep(&q->frames_ctx.mids);
+    q->frames_ctx.nb_mids = 0;
+
+    return 0;
 }

-int ff_qsv_decode_close(QSVContext *q)
+int ff_qsv_process_data(AVCodecContext *avctx, QSVContext *q,
+                        AVFrame *frame, int *got_frame, AVPacket *pkt)
 {
-    close_decoder(q);
+    uint8_t *dummy_data;
+    int dummy_size;
+    int ret;

-    q->session = NULL;
+    if (!q->avctx_internal) {
+        q->avctx_internal = avcodec_alloc_context3(NULL);
+        if (!q->avctx_internal)
+            return AVERROR(ENOMEM);

-    ff_qsv_close_internal_session(&q->internal_qs);
+        q->parser = av_parser_init(avctx->codec_id);
+        if (!q->parser)
+            return AVERROR(ENOMEM);

-    av_fifo_free(q->async_fifo);
-    q->async_fifo = NULL;
+        q->parser->flags |= PARSER_FLAG_COMPLETE_FRAMES;
+        q->orig_pix_fmt   = AV_PIX_FMT_NONE;
+    }
+
+    if (!pkt->size)
+        return qsv_decode(avctx, q, frame, got_frame, pkt);
+
+    /* we assume the packets are already split properly and want
+     * just the codec parameters here */
+    av_parser_parse2(q->parser, q->avctx_internal,
+                     &dummy_data, &dummy_size,
+                     pkt->data, pkt->size, pkt->pts, pkt->dts,
+                     pkt->pos);
+
+    /* TODO: flush delayed frames on reinit */
+    if (q->parser->format       != q->orig_pix_fmt    ||
+        q->parser->coded_width  != avctx->coded_width ||
+        q->parser->coded_height != avctx->coded_height) {
+        enum AVPixelFormat pix_fmts[3] = { AV_PIX_FMT_QSV,
+                                           AV_PIX_FMT_NONE,
+                                           AV_PIX_FMT_NONE };
+        enum AVPixelFormat qsv_format;
+
+        qsv_format = ff_qsv_map_pixfmt(q->parser->format);
+        if (qsv_format < 0) {
+            av_log(avctx, AV_LOG_ERROR,
+                   "Only 8-bit YUV420 streams are supported.\n");
+            ret = AVERROR(ENOSYS);
+            goto reinit_fail;
+        }

-    av_fifo_free(q->input_fifo);
-    q->input_fifo = NULL;
+        q->orig_pix_fmt     = q->parser->format;
+        avctx->pix_fmt      = pix_fmts[1] = qsv_format;
+        avctx->width        = q->parser->width;
+        avctx->height       = q->parser->height;
+        avctx->coded_width  = q->parser->coded_width;
+        avctx->coded_height = q->parser->coded_height;
+        avctx->field_order  = q->parser->field_order;
+        avctx->level        = q->avctx_internal->level;
+        avctx->profile      = q->avctx_internal->profile;
+
+        ret = ff_get_format(avctx, pix_fmts);
+        if (ret < 0)
+            goto reinit_fail;

-    av_fifo_free(q->pkt_fifo);
-    q->pkt_fifo = NULL;
+        avctx->pix_fmt = ret;

-    return 0;
+        ret = qsv_decode_init(avctx, q);
+        if (ret < 0)
+            goto reinit_fail;
+    }
+
+    return qsv_decode(avctx, q, frame, got_frame, pkt);
+
+reinit_fail:
+    q->orig_pix_fmt = q->parser->format = avctx->pix_fmt = AV_PIX_FMT_NONE;
+    return ret;
+}
+
+void ff_qsv_decode_flush(AVCodecContext *avctx, QSVContext *q)
+{
+    q->orig_pix_fmt = AV_PIX_FMT_NONE;
 }
diff --git a/libavcodec/qsvdec.h b/libavcodec/qsvdec.h
index 97a3315..0bf1e55 100644
--- a/libavcodec/qsvdec.h
+++ b/libavcodec/qsvdec.h
@@ -41,7 +41,9 @@  typedef struct QSVContext {

     // the session we allocated internally, in case the caller did not provide
     // one
-    QSVSession internal_qs;
+    mfxSession internal_session;
+
+    QSVFramesContext frames_ctx;

     /**
      * a linked list of frames currently being used by QSV
@@ -49,22 +51,11 @@  typedef struct QSVContext {
     QSVFrame *work_frames;

     AVFifoBuffer *async_fifo;
-    AVFifoBuffer *input_fifo;
-
-    // we should to buffer input packets at some cases
-    // else it is not possible to handle dynamic stream changes correctly
-    // this fifo uses for input packets buffering
-    AVFifoBuffer *pkt_fifo;
-
-    // this flag indicates that header parsed,
-    // decoder instance created and ready to general decoding
-    int engine_ready;

-    // we can not just re-init decoder if different sequence header arrived
-    // we should to deliver all buffered frames but we can not decode new packets
-    // this time. So when reinit_pending is non-zero we flushing decoder and
-    // accumulate new arrived packets into pkt_fifo
-    int reinit_pending;
+    // the internal parser and codec context for parsing the data
+    AVCodecParserContext *parser;
+    AVCodecContext *avctx_internal;
+    enum AVPixelFormat orig_pix_fmt;

     // options set by the caller
     int async_depth;
@@ -78,11 +69,10 @@  typedef struct QSVContext {

 int ff_qsv_map_pixfmt(enum AVPixelFormat format);

-int ff_qsv_decode(AVCodecContext *s, QSVContext *q,
-                  AVFrame *frame, int *got_frame,
-                  AVPacket *avpkt);
+int ff_qsv_process_data(AVCodecContext *avctx, QSVContext *q,
+                        AVFrame *frame, int *got_frame, AVPacket *pkt);

-void ff_qsv_decode_reset(AVCodecContext *avctx, QSVContext *q);
+void ff_qsv_decode_flush(AVCodecContext *avctx, QSVContext *q);

 int ff_qsv_decode_close(QSVContext *q);

diff --git a/libavcodec/qsvdec_h2645.c b/libavcodec/qsvdec_h2645.c
index 98a1952..2a1d2b4 100644
--- a/libavcodec/qsvdec_h2645.c
+++ b/libavcodec/qsvdec_h2645.c
@@ -33,11 +33,14 @@ 

 #include "avcodec.h"
 #include "internal.h"
+#include "qsv_internal.h"
 #include "qsvdec.h"
+#include "qsv.h"

 enum LoadPlugin {
     LOAD_PLUGIN_NONE,
     LOAD_PLUGIN_HEVC_SW,
+    LOAD_PLUGIN_HEVC_HW,
 };

 typedef struct QSVH2645Context {
@@ -86,7 +89,8 @@  static av_cold int qsv_decode_init(AVCodecContext *avctx)
     int ret;

     if (avctx->codec_id == AV_CODEC_ID_HEVC && s->load_plugin != LOAD_PLUGIN_NONE) {
-        static const char *uid_hevcenc_sw = "15dd936825ad475ea34e35f3f54217a6";
+        static const char *uid_hevcdec_sw = "15dd936825ad475ea34e35f3f54217a6";
+        static const char *uid_hevcdec_hw = "33a61c0b4c27454ca8d85dde757c6f8e";

         if (s->qsv.load_plugins[0]) {
             av_log(avctx, AV_LOG_WARNING,
@@ -94,22 +98,22 @@  static av_cold int qsv_decode_init(AVCodecContext *avctx)
                    "The load_plugin value will be ignored.\n");
         } else {
             av_freep(&s->qsv.load_plugins);
-            s->qsv.load_plugins = av_strdup(uid_hevcenc_sw);
+
+            if (s->load_plugin == LOAD_PLUGIN_HEVC_SW)
+                s->qsv.load_plugins = av_strdup(uid_hevcdec_sw);
+            else
+                s->qsv.load_plugins = av_strdup(uid_hevcdec_hw);
             if (!s->qsv.load_plugins)
                 return AVERROR(ENOMEM);
         }
     }
+
     s->packet_fifo = av_fifo_alloc(sizeof(AVPacket));
     if (!s->packet_fifo) {
         ret = AVERROR(ENOMEM);
         goto fail;
     }

-    if (avctx->codec_id == AV_CODEC_ID_H264) {
-        //regarding ticks_per_frame description, should be 2 for h.264:
-        avctx->ticks_per_frame = 2;
-    }
-
     return 0;
 fail:
     qsv_decode_close(avctx);
@@ -184,7 +188,7 @@  static int qsv_decode_frame(AVCodecContext *avctx, void *data,

             /* no more data */
             if (av_fifo_size(s->packet_fifo) < sizeof(AVPacket))
-                return avpkt->size ? avpkt->size : ff_qsv_decode(avctx, &s->qsv, frame, got_frame, avpkt);
+                return avpkt->size ? avpkt->size : ff_qsv_process_data(avctx, &s->qsv, frame, got_frame, avpkt);

             av_packet_unref(&s->pkt_filtered);

@@ -202,7 +206,7 @@  static int qsv_decode_frame(AVCodecContext *avctx, void *data,
                 av_packet_unref(&input_ref);
         }

-        ret = ff_qsv_decode(avctx, &s->qsv, frame, got_frame, &s->pkt_filtered);
+        ret = ff_qsv_process_data(avctx, &s->qsv, frame, got_frame, &s->pkt_filtered);
         if (ret < 0)
             return ret;

@@ -216,7 +220,9 @@  static int qsv_decode_frame(AVCodecContext *avctx, void *data,
 static void qsv_decode_flush(AVCodecContext *avctx)
 {
     QSVH2645Context *s = avctx->priv_data;
-    ff_qsv_decode_reset(avctx, &s->qsv);
+
+    qsv_clear_buffers(s);
+    ff_qsv_decode_flush(avctx, &s->qsv);
 }

 #define OFFSET(x) offsetof(QSVH2645Context, x)
@@ -233,9 +239,10 @@  AVHWAccel ff_hevc_qsv_hwaccel = {
 static const AVOption hevc_options[] = {
     { "async_depth", "Internal parallelization depth, the higher the value the higher the latency.", OFFSET(qsv.async_depth), AV_OPT_TYPE_INT, { .i64 = ASYNC_DEPTH_DEFAULT }, 0, INT_MAX, VD },

-    { "load_plugin", "A user plugin to load in an internal session", OFFSET(load_plugin), AV_OPT_TYPE_INT, { .i64 = LOAD_PLUGIN_HEVC_SW }, LOAD_PLUGIN_NONE, LOAD_PLUGIN_HEVC_SW, VD, "load_plugin" },
+    { "load_plugin", "A user plugin to load in an internal session", OFFSET(load_plugin), AV_OPT_TYPE_INT, { .i64 = LOAD_PLUGIN_HEVC_HW }, LOAD_PLUGIN_NONE, LOAD_PLUGIN_HEVC_HW, VD, "load_plugin" },
     { "none",     NULL, 0, AV_OPT_TYPE_CONST, { .i64 = LOAD_PLUGIN_NONE },    0, 0, VD, "load_plugin" },
     { "hevc_sw",  NULL, 0, AV_OPT_TYPE_CONST, { .i64 = LOAD_PLUGIN_HEVC_SW }, 0, 0, VD, "load_plugin" },
+    { "hevc_hw",  NULL, 0, AV_OPT_TYPE_CONST, { .i64 = LOAD_PLUGIN_HEVC_HW }, 0, 0, VD, "load_plugin" },

     { "load_plugins", "A :-separate list of hexadecimal plugin UIDs to load in an internal session",
         OFFSET(qsv.load_plugins), AV_OPT_TYPE_STRING, { .str = "" }, 0, 0, VD },
diff --git a/libavcodec/qsvdec_mpeg2.c b/libavcodec/qsvdec_mpeg2.c
index 70ccbc5..c080657 100644
--- a/libavcodec/qsvdec_mpeg2.c
+++ b/libavcodec/qsvdec_mpeg2.c
@@ -1,5 +1,7 @@ 
 /*
- * Intel MediaSDK QSV based MPEG-2 video decoder
+ * Intel MediaSDK QSV based MPEG-2 decoder
+ *
+ * copyright (c) 2015 Anton Khirnov
  *
  * This file is part of FFmpeg.
  *
@@ -18,32 +20,70 @@ 
  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  */

+
 #include <stdint.h>
 #include <string.h>

+#include <mfx/mfxvideo.h>
+
 #include "libavutil/common.h"
+#include "libavutil/fifo.h"
 #include "libavutil/opt.h"

 #include "avcodec.h"
+#include "internal.h"
+#include "qsv_internal.h"
 #include "qsvdec.h"
+#include "qsv.h"

 typedef struct QSVMPEG2Context {
     AVClass *class;
     QSVContext qsv;
+
+    AVFifoBuffer *packet_fifo;
+
+    AVPacket input_ref;
 } QSVMPEG2Context;

+static void qsv_clear_buffers(QSVMPEG2Context *s)
+{
+    AVPacket pkt;
+    while (av_fifo_size(s->packet_fifo) >= sizeof(pkt)) {
+        av_fifo_generic_read(s->packet_fifo, &pkt, sizeof(pkt), NULL);
+        av_packet_unref(&pkt);
+    }
+
+    av_packet_unref(&s->input_ref);
+}
+
 static av_cold int qsv_decode_close(AVCodecContext *avctx)
 {
     QSVMPEG2Context *s = avctx->priv_data;

     ff_qsv_decode_close(&s->qsv);

+    qsv_clear_buffers(s);
+
+    av_fifo_free(s->packet_fifo);
+
     return 0;
 }

 static av_cold int qsv_decode_init(AVCodecContext *avctx)
 {
+    QSVMPEG2Context *s = avctx->priv_data;
+    int ret;
+
+    s->packet_fifo = av_fifo_alloc(sizeof(AVPacket));
+    if (!s->packet_fifo) {
+        ret = AVERROR(ENOMEM);
+        goto fail;
+    }
+
     return 0;
+fail:
+    qsv_decode_close(avctx);
+    return ret;
 }

 static int qsv_decode_frame(AVCodecContext *avctx, void *data,
@@ -51,14 +91,53 @@  static int qsv_decode_frame(AVCodecContext *avctx, void *data,
 {
     QSVMPEG2Context *s = avctx->priv_data;
     AVFrame *frame    = data;
+    int ret;
+
+    /* buffer the input packet */
+    if (avpkt->size) {
+        AVPacket input_ref = { 0 };
+
+        if (av_fifo_space(s->packet_fifo) < sizeof(input_ref)) {
+            ret = av_fifo_realloc2(s->packet_fifo,
+                                   av_fifo_size(s->packet_fifo) + sizeof(input_ref));
+            if (ret < 0)
+                return ret;
+        }
+
+        ret = av_packet_ref(&input_ref, avpkt);
+        if (ret < 0)
+            return ret;
+        av_fifo_generic_write(s->packet_fifo, &input_ref, sizeof(input_ref), NULL);
+    }

-    return ff_qsv_decode(avctx, &s->qsv, frame, got_frame, avpkt);
+    /* process buffered data */
+    while (!*got_frame) {
+        if (s->input_ref.size <= 0) {
+            /* no more data */
+            if (av_fifo_size(s->packet_fifo) < sizeof(AVPacket))
+                return avpkt->size ? avpkt->size : ff_qsv_process_data(avctx, &s->qsv, frame, got_frame, avpkt);
+
+            av_packet_unref(&s->input_ref);
+            av_fifo_generic_read(s->packet_fifo, &s->input_ref, sizeof(s->input_ref), NULL);
+        }
+
+        ret = ff_qsv_process_data(avctx, &s->qsv, frame, got_frame, &s->input_ref);
+        if (ret < 0)
+            return ret;
+
+        s->input_ref.size -= ret;
+        s->input_ref.data += ret;
+    }
+
+    return avpkt->size;
 }

 static void qsv_decode_flush(AVCodecContext *avctx)
 {
     QSVMPEG2Context *s = avctx->priv_data;
-    ff_qsv_decode_reset(avctx, &s->qsv);
+
+    qsv_clear_buffers(s);
+    ff_qsv_decode_flush(avctx, &s->qsv);
 }

 AVHWAccel ff_mpeg2_qsv_hwaccel = {
diff --git a/libavcodec/qsvdec_vc1.c b/libavcodec/qsvdec_vc1.c
index fcf101f..f7b1fb0 100644
--- a/libavcodec/qsvdec_vc1.c
+++ b/libavcodec/qsvdec_vc1.c
@@ -21,18 +21,37 @@ 
 #include <stdint.h>
 #include <string.h>

+#include <mfx/mfxvideo.h>
+
 #include "libavutil/common.h"
 #include "libavutil/fifo.h"
 #include "libavutil/opt.h"

 #include "avcodec.h"
+#include "internal.h"
+#include "qsv_internal.h"
 #include "qsvdec.h"
+#include "qsv.h"

 typedef struct QSVVC1Context {
     AVClass *class;
     QSVContext qsv;
+
+    AVFifoBuffer *packet_fifo;
+
+    AVPacket input_ref;
 } QSVVC1Context;

+static void qsv_clear_buffers(QSVVC1Context *s)
+{
+    AVPacket pkt;
+    while (av_fifo_size(s->packet_fifo) >= sizeof(pkt)) {
+        av_fifo_generic_read(s->packet_fifo, &pkt, sizeof(pkt), NULL);
+        av_packet_unref(&pkt);
+    }
+
+    av_packet_unref(&s->input_ref);
+}

 static av_cold int qsv_decode_close(AVCodecContext *avctx)
 {
@@ -40,22 +59,82 @@  static av_cold int qsv_decode_close(AVCodecContext *avctx)

     ff_qsv_decode_close(&s->qsv);

+    qsv_clear_buffers(s);
+
+    av_fifo_free(s->packet_fifo);
+
     return 0;
 }

+static av_cold int qsv_decode_init(AVCodecContext *avctx)
+{
+    QSVVC1Context *s = avctx->priv_data;
+    int ret;
+
+    s->packet_fifo = av_fifo_alloc(sizeof(AVPacket));
+    if (!s->packet_fifo) {
+        ret = AVERROR(ENOMEM);
+        goto fail;
+    }
+
+    return 0;
+fail:
+    qsv_decode_close(avctx);
+    return ret;
+}
+
 static int qsv_decode_frame(AVCodecContext *avctx, void *data,
                             int *got_frame, AVPacket *avpkt)
 {
     QSVVC1Context *s = avctx->priv_data;
-    AVFrame *frame    = data;
+    AVFrame *frame   = data;
+    int ret;
+
+    /* buffer the input packet */
+    if (avpkt->size) {
+        AVPacket input_ref = { 0 };
+
+        if (av_fifo_space(s->packet_fifo) < sizeof(input_ref)) {
+            ret = av_fifo_realloc2(s->packet_fifo,
+                                   av_fifo_size(s->packet_fifo) + sizeof(input_ref));
+            if (ret < 0)
+                return ret;
+        }
+
+        ret = av_packet_ref(&input_ref, avpkt);
+        if (ret < 0)
+            return ret;
+        av_fifo_generic_write(s->packet_fifo, &input_ref, sizeof(input_ref), NULL);
+    }

-    return ff_qsv_decode(avctx, &s->qsv, frame, got_frame, avpkt);
+    /* process buffered data */
+    while (!*got_frame) {
+        if (s->input_ref.size <= 0) {
+            /* no more data */
+            if (av_fifo_size(s->packet_fifo) < sizeof(AVPacket))
+                return avpkt->size ? avpkt->size : ff_qsv_process_data(avctx, &s->qsv, frame, got_frame, avpkt);
+
+            av_packet_unref(&s->input_ref);
+            av_fifo_generic_read(s->packet_fifo, &s->input_ref, sizeof(s->input_ref), NULL);
+        }
+
+        ret = ff_qsv_process_data(avctx, &s->qsv, frame, got_frame, &s->input_ref);
+        if (ret < 0)
+            return ret;
+
+        s->input_ref.size -= ret;
+        s->input_ref.data += ret;
+    }
+
+    return avpkt->size;
 }

 static void qsv_decode_flush(AVCodecContext *avctx)
 {
     QSVVC1Context *s = avctx->priv_data;
-    ff_qsv_decode_reset(avctx, &s->qsv);
+
+    qsv_clear_buffers(s);
+    ff_qsv_decode_flush(avctx, &s->qsv);
 }

 AVHWAccel ff_vc1_qsv_hwaccel = {
@@ -85,11 +164,11 @@  AVCodec ff_vc1_qsv_decoder = {
     .priv_data_size = sizeof(QSVVC1Context),
     .type           = AVMEDIA_TYPE_VIDEO,
     .id             = AV_CODEC_ID_VC1,
-    .init           = NULL,
+    .init           = qsv_decode_init,
     .decode         = qsv_decode_frame,
     .flush          = qsv_decode_flush,
     .close          = qsv_decode_close,
-    .capabilities   = AV_CODEC_CAP_DELAY,
+    .capabilities   = AV_CODEC_CAP_DELAY | AV_CODEC_CAP_DR1,
     .priv_class     = &class,
     .pix_fmts       = (const enum AVPixelFormat[]){ AV_PIX_FMT_NV12,
                                                     AV_PIX_FMT_QSV,
diff --git a/libavcodec/qsvenc.c b/libavcodec/qsvenc.c
index f56cb61..aa20971 100644
--- a/libavcodec/qsvenc.c
+++ b/libavcodec/qsvenc.c
@@ -26,6 +26,8 @@ 
 #include <mfx/mfxvideo.h>

 #include "libavutil/common.h"
+#include "libavutil/hwcontext.h"
+#include "libavutil/hwcontext_qsv.h"
 #include "libavutil/mem.h"
 #include "libavutil/log.h"
 #include "libavutil/time.h"
@@ -379,31 +381,25 @@  static int init_video_param(AVCodecContext *avctx, QSVEncContext *q)
     q->param.mfx.EncodedOrder       = 0;
     q->param.mfx.BufferSizeInKB     = 0;

-    q->param.mfx.FrameInfo.FourCC         = MFX_FOURCC_NV12;
-    q->param.mfx.FrameInfo.CropX          = 0;
-    q->param.mfx.FrameInfo.CropY          = 0;
-    q->param.mfx.FrameInfo.CropW          = avctx->width;
-    q->param.mfx.FrameInfo.CropH          = avctx->height;
-    q->param.mfx.FrameInfo.AspectRatioW   = avctx->sample_aspect_ratio.num;
-    q->param.mfx.FrameInfo.AspectRatioH   = avctx->sample_aspect_ratio.den;
-    q->param.mfx.FrameInfo.ChromaFormat   = MFX_CHROMAFORMAT_YUV420;
-    q->param.mfx.FrameInfo.BitDepthLuma   = 8;
-    q->param.mfx.FrameInfo.BitDepthChroma = 8;
-    q->param.mfx.FrameInfo.Width          = FFALIGN(avctx->width, q->width_align);
-
-    if (avctx->flags & AV_CODEC_FLAG_INTERLACED_DCT) {
-       /* A true field layout (TFF or BFF) is not important here,
-          it will specified later during frame encoding. But it is important
-          to specify is frame progressive or not because allowed heigh alignment
-          does depend by this.
-        */
-        q->param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_TFF;
-        q->height_align = 32;
+    if (avctx->hw_frames_ctx) {
+        AVHWFramesContext *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
+        AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
+        q->param.mfx.FrameInfo = frames_hwctx->surfaces[0].Info;
     } else {
-        q->param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
-        q->height_align = 16;
+        q->param.mfx.FrameInfo.FourCC         = MFX_FOURCC_NV12;
+        q->param.mfx.FrameInfo.Width          = FFALIGN(avctx->width, q->width_align);
+        q->param.mfx.FrameInfo.Height         = FFALIGN(avctx->height, 32);
+        q->param.mfx.FrameInfo.CropX          = 0;
+        q->param.mfx.FrameInfo.CropY          = 0;
+        q->param.mfx.FrameInfo.CropW          = avctx->width;
+        q->param.mfx.FrameInfo.CropH          = avctx->height;
+        q->param.mfx.FrameInfo.AspectRatioW   = avctx->sample_aspect_ratio.num;
+        q->param.mfx.FrameInfo.AspectRatioH   = avctx->sample_aspect_ratio.den;
+        q->param.mfx.FrameInfo.PicStruct      = MFX_PICSTRUCT_PROGRESSIVE;
+        q->param.mfx.FrameInfo.ChromaFormat   = MFX_CHROMAFORMAT_YUV420;
+        q->param.mfx.FrameInfo.BitDepthLuma   = 8;
+        q->param.mfx.FrameInfo.BitDepthChroma = 8;
     }
-   q->param.mfx.FrameInfo.Height    = FFALIGN(avctx->height, q->height_align);

     if (avctx->framerate.den > 0 && avctx->framerate.num > 0) {
         q->param.mfx.FrameInfo.FrameRateExtN = avctx->framerate.num;
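
When an AV_PIX_FMT_QSV frames context is attached to the encoder, mfxFrameInfo is now copied verbatim from its first surface instead of being rebuilt from AVCodecContext fields. A rough caller-side sketch of providing such a context; the helper name and pool size are illustrative only and error handling is kept minimal:

    #include "avcodec.h"
    #include "libavutil/common.h"
    #include "libavutil/hwcontext.h"

    /* Illustrative helper: hand the QSV encoder a hardware frames context so
     * init_video_param() can take FrameInfo straight from its surfaces. */
    static int attach_qsv_frames(AVCodecContext *enc_ctx, AVBufferRef *qsv_device_ref)
    {
        AVBufferRef       *frames_ref = av_hwframe_ctx_alloc(qsv_device_ref);
        AVHWFramesContext *frames;
        int ret;

        if (!frames_ref)
            return AVERROR(ENOMEM);

        frames = (AVHWFramesContext*)frames_ref->data;
        frames->format            = AV_PIX_FMT_QSV;
        frames->sw_format         = AV_PIX_FMT_NV12;
        frames->width             = FFALIGN(enc_ctx->width,  32);
        frames->height            = FFALIGN(enc_ctx->height, 32);
        frames->initial_pool_size = 32;   /* arbitrary example value */

        ret = av_hwframe_ctx_init(frames_ref);
        if (ret < 0) {
            av_buffer_unref(&frames_ref);
            return ret;
        }

        enc_ctx->hw_frames_ctx = frames_ref;  /* reference now owned by the codec context */
        return 0;
    }
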
@@ -536,7 +532,7 @@  FF_ENABLE_DEPRECATION_WARNINGS

             q->extparam_internal[q->nb_extparam_internal++] = (mfxExtBuffer *)&q->extco2;

-#if QSV_VERSION_ATLEAST(1,8)
+#if QSV_HAVE_LA_DS
             q->extco2.LookAheadDS           = q->look_ahead_downsampling;
 #endif
         }
@@ -673,12 +669,45 @@  static int qsv_init_opaque_alloc(AVCodecContext *avctx, QSVEncContext *q)
     return 0;
 }

+static int qsvenc_init_session(AVCodecContext *avctx, QSVEncContext *q)
+{
+    int ret;
+
+    if (avctx->hwaccel_context) {
+        AVQSVContext *qsv = avctx->hwaccel_context;
+        q->session = qsv->session;
+    } else if (avctx->hw_frames_ctx) {
+        q->frames_ctx.hw_frames_ctx = av_buffer_ref(avctx->hw_frames_ctx);
+        if (!q->frames_ctx.hw_frames_ctx)
+            return AVERROR(ENOMEM);
+
+        ret = ff_qsv_init_session_hwcontext(avctx, &q->internal_session,
+                                            &q->frames_ctx, q->load_plugins,
+                                            q->param.IOPattern == MFX_IOPATTERN_IN_OPAQUE_MEMORY);
+        if (ret < 0) {
+            av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
+            return ret;
+        }
+
+        q->session = q->internal_session;
+    } else {
+        ret = ff_qsv_init_internal_session(avctx, &q->internal_session,
+                                           q->load_plugins);
+        if (ret < 0)
+            return ret;
+
+        q->session = q->internal_session;
+    }
+
+    return 0;
+}
+
 int ff_qsv_enc_init(AVCodecContext *avctx, QSVEncContext *q)
 {
+    int iopattern = 0;
     int opaque_alloc = 0;
     int ret;

-    q->param.IOPattern  = MFX_IOPATTERN_IN_SYSTEM_MEMORY;
     q->param.AsyncDepth = q->async_depth;

     q->async_fifo = av_fifo_alloc((1 + q->async_depth) *
@@ -689,32 +718,34 @@  int ff_qsv_enc_init(AVCodecContext *avctx, QSVEncContext *q)
     if (avctx->hwaccel_context) {
         AVQSVContext *qsv = avctx->hwaccel_context;

-        q->session         = qsv->session;
-        q->param.IOPattern = qsv->iopattern;
-
+        iopattern    = qsv->iopattern;
         opaque_alloc = qsv->opaque_alloc;
     }

-    if (!q->session) {
-        ret = ff_qsv_init_internal_session(avctx, &q->internal_qs,
-                                           q->load_plugins);
-        if (ret < 0)
-            return ret;
+    if (avctx->hw_frames_ctx) {
+        AVHWFramesContext    *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
+        AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;

-        q->session = q->internal_qs.session;
+        if (!iopattern) {
+            if (frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)
+                iopattern = MFX_IOPATTERN_IN_OPAQUE_MEMORY;
+            else if (frames_hwctx->frame_type &
+                     (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
+                iopattern = MFX_IOPATTERN_IN_VIDEO_MEMORY;
+        }
     }

-    ret = init_video_param(avctx, q);
+    if (!iopattern)
+        iopattern = MFX_IOPATTERN_IN_SYSTEM_MEMORY;
+    q->param.IOPattern = iopattern;
+
+    ret = qsvenc_init_session(avctx, q);
     if (ret < 0)
         return ret;

-    ret = MFXVideoENCODE_Query(q->session, &q->param,&q->param);
-    if (MFX_WRN_PARTIAL_ACCELERATION==ret) {
-        av_log(avctx, AV_LOG_WARNING, "Encoder will work with partial HW acceleration\n");
-    } else if (ret < 0) {
-        av_log(avctx, AV_LOG_ERROR, "Error %d querying encoder params\n", ret);
-        return ff_qsv_error(ret);
-    }
+    ret = init_video_param(avctx, q);
+    if (ret < 0)
+        return ret;

     ret = MFXVideoENCODE_QueryIOSurf(q->session, &q->param, &q->req);
     if (ret < 0) {
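
The IOPattern is no longer hard-coded to system memory: an explicit AVQSVContext setting still wins, otherwise it is derived from the frame_type of the attached frames context, with system memory as the final fallback. Restated as a standalone helper for clarity; the function name is invented, the constants are the ones used in the hunk above:

    #include <mfx/mfxvideo.h>
    #include "libavutil/hwcontext_qsv.h"

    /* Invented helper restating the IOPattern selection performed above. */
    static int pick_iopattern(const AVQSVFramesContext *frames_hwctx,
                              int requested_iopattern)
    {
        if (requested_iopattern)   /* explicit request via AVQSVContext */
            return requested_iopattern;

        if (frames_hwctx) {
            if (frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)
                return MFX_IOPATTERN_IN_OPAQUE_MEMORY;
            if (frames_hwctx->frame_type &
                (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET |
                 MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
                return MFX_IOPATTERN_IN_VIDEO_MEMORY;
        }

        return MFX_IOPATTERN_IN_SYSTEM_MEMORY;
    }
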
@@ -758,7 +789,7 @@  int ff_qsv_enc_init(AVCodecContext *avctx, QSVEncContext *q)
     }

     ret = MFXVideoENCODE_Init(q->session, &q->param);
-    if (MFX_WRN_PARTIAL_ACCELERATION==ret) {
+    if (ret == MFX_WRN_PARTIAL_ACCELERATION) {
         av_log(avctx, AV_LOG_WARNING, "Encoder will work with partial HW acceleration\n");
     } else if (ret < 0) {
         av_log(avctx, AV_LOG_ERROR, "Error initializing the encoder\n");
@@ -856,9 +887,8 @@  static int submit_frame(QSVEncContext *q, const AVFrame *frame,
         qf->surface = (mfxFrameSurface1*)qf->frame->data[3];
     } else {
         /* make a copy if the input is not padded as libmfx requires */
-        if (     frame->height & (q->height_align - 1) ||
-            frame->linesize[0] & (q->width_align - 1)) {
-            qf->frame->height = FFALIGN(frame->height, q->height_align);
+        if (frame->height & 31 || frame->linesize[0] & (q->width_align - 1)) {
+            qf->frame->height = FFALIGN(frame->height, 32);
             qf->frame->width  = FFALIGN(frame->width, q->width_align);

             ret = ff_get_buffer(q->avctx, qf->frame, AV_GET_BUFFER_FLAG_REF);
@@ -968,30 +998,21 @@  static int encode_frame(AVCodecContext *avctx, QSVEncContext *q,

     do {
         ret = MFXVideoENCODE_EncodeFrameAsync(q->session, enc_ctrl, surf, bs, sync);
-        if (ret == MFX_WRN_DEVICE_BUSY) {
+        if (ret == MFX_WRN_DEVICE_BUSY)
             av_usleep(500);
-            continue;
-        }
-        break;
-    } while ( 1 );
+    } while (ret > 0);

     if (ret < 0) {
         av_packet_unref(&new_pkt);
         av_freep(&bs);
-        if (ret == MFX_ERR_MORE_DATA)
-            return 0;
-        av_log(avctx, AV_LOG_ERROR, "EncodeFrameAsync returned %d\n", ret);
-        return ff_qsv_error(ret);
+        av_freep(&sync);
+        return (ret == MFX_ERR_MORE_DATA) ? 0 : ff_qsv_error(ret);
     }

-    if (ret == MFX_WRN_INCOMPATIBLE_VIDEO_PARAM) {
-        if (frame->interlaced_frame)
-            print_interlace_msg(avctx, q);
-        else
-            av_log(avctx, AV_LOG_WARNING,
-                   "EncodeFrameAsync returned 'incompatible param' code\n");
-    }
-    if (sync) {
+    if (ret == MFX_WRN_INCOMPATIBLE_VIDEO_PARAM && frame->interlaced_frame)
+        print_interlace_msg(avctx, q);
+
+    if (*sync) {
         av_fifo_generic_write(q->async_fifo, &new_pkt, sizeof(new_pkt), NULL);
         av_fifo_generic_write(q->async_fifo, &sync,    sizeof(sync),    NULL);
         av_fifo_generic_write(q->async_fifo, &bs,      sizeof(bs),    NULL);
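
EncodeFrameAsync is now retried for as long as it returns a positive (warning) status, sleeping 500 microseconds whenever the device reports itself busy, and the packet/sync point/bitstream triple is only queued once a sync point was actually produced. The retry idiom in isolation, as a sketch only; the wrapper name is invented:

    #include <mfx/mfxvideo.h>
    #include "libavutil/time.h"

    /* Invented wrapper around the busy-wait idiom used above: mfx warnings
     * are positive, success is MFX_ERR_NONE (0) and errors are negative, so
     * the loop resubmits until the call either succeeds or fails. */
    static mfxStatus encode_with_retry(mfxSession session, mfxEncodeCtrl *ctrl,
                                       mfxFrameSurface1 *surf, mfxBitstream *bs,
                                       mfxSyncPoint *sync)
    {
        mfxStatus ret;

        do {
            ret = MFXVideoENCODE_EncodeFrameAsync(session, ctrl, surf, bs, sync);
            if (ret == MFX_WRN_DEVICE_BUSY)
                av_usleep(500);   /* back off before resubmitting */
        } while (ret > 0);

        return ret;
    }
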
@@ -1079,9 +1100,14 @@  int ff_qsv_enc_close(AVCodecContext *avctx, QSVEncContext *q)

     if (q->session)
         MFXVideoENCODE_Close(q->session);
-    q->session = NULL;
-
-    ff_qsv_close_internal_session(&q->internal_qs);
+    if (q->internal_session)
+        MFXClose(q->internal_session);
+    q->session          = NULL;
+    q->internal_session = NULL;
+
+    av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
+    av_freep(&q->frames_ctx.mids);
+    q->frames_ctx.nb_mids = 0;

     cur = q->work_frames;
     while (cur) {
diff --git a/libavcodec/qsvenc.h b/libavcodec/qsvenc.h
index 2d7bd32..361d933 100644
--- a/libavcodec/qsvenc.h
+++ b/libavcodec/qsvenc.h
@@ -42,6 +42,7 @@ 
 #define QSV_HAVE_BREF_TYPE      QSV_VERSION_ATLEAST(1, 8)

 #define QSV_HAVE_LA     QSV_VERSION_ATLEAST(1, 7)
+#define QSV_HAVE_LA_DS  QSV_VERSION_ATLEAST(1, 8)
 #define QSV_HAVE_LA_HRD QSV_VERSION_ATLEAST(1, 11)
 #define QSV_HAVE_ICQ    QSV_VERSION_ATLEAST(1, 8)
 #define QSV_HAVE_VCM    QSV_VERSION_ATLEAST(1, 8)
@@ -79,11 +80,10 @@  typedef struct QSVEncContext {
     QSVFrame *work_frames;

     mfxSession session;
-    QSVSession internal_qs;
+    mfxSession internal_session;

     int packet_size;
     int width_align;
-    int height_align;

     mfxVideoParam param;
     mfxFrameAllocRequest req;
@@ -104,6 +104,8 @@  typedef struct QSVEncContext {

     AVFifoBuffer *async_fifo;

+    QSVFramesContext frames_ctx;
+
     // options set by the caller
     int async_depth;
     int idr_interval;
diff --git a/libavcodec/qsvenc_h264.c b/libavcodec/qsvenc_h264.c
index f5b01bb..7ff1a98 100644
--- a/libavcodec/qsvenc_h264.c
+++ b/libavcodec/qsvenc_h264.c
@@ -111,8 +111,7 @@  static const AVOption options[] = {
     { "look_ahead",       "Use VBR algorithm with look ahead",    OFFSET(qsv.look_ahead),       AV_OPT_TYPE_INT, { .i64 = 1 }, 0, 1, VE },
     { "look_ahead_depth", "Depth of look ahead in number frames", OFFSET(qsv.look_ahead_depth), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 100, VE },
 #endif
-
-#if QSV_VERSION_ATLEAST(1,8)
+#if QSV_HAVE_LA_DS
     { "look_ahead_downsampling", NULL, OFFSET(qsv.look_ahead_downsampling), AV_OPT_TYPE_INT, { .i64 = MFX_LOOKAHEAD_DS_UNKNOWN }, MFX_LOOKAHEAD_DS_UNKNOWN, MFX_LOOKAHEAD_DS_2x, VE, "look_ahead_downsampling" },
     { "unknown"                , NULL, 0, AV_OPT_TYPE_CONST, { .i64 = MFX_LOOKAHEAD_DS_UNKNOWN }, INT_MIN, INT_MAX,     VE, "look_ahead_downsampling" },
     { "off"                    , NULL, 0, AV_OPT_TYPE_CONST, { .i64 = MFX_LOOKAHEAD_DS_OFF     }, INT_MIN, INT_MAX,     VE, "look_ahead_downsampling" },