author    Artem Galin <[email protected]>    2020-02-29 12:31:23 +0000
committer Scott <[email protected]>    2020-03-05 20:33:13 +0000
commit    c121bf7c797218d4ba90b23c2c31d0eea0c64692 (patch)
tree      f4958666950430ff6fd9da7f0efd13ba7f89ec54 /contrib/ffmpeg
parent    bc4a502dfab17ee88d5d2c12c7d91bb5e85bdb39 (diff)
qsv: hw decoding improvements
Diffstat (limited to 'contrib/ffmpeg')
-rw-r--r--  contrib/ffmpeg/A13-qsv-dx11.patch | 938
1 file changed, 846 insertions(+), 92 deletions(-)
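
For orientation before the diff: the recurring technique in this revision of the patch is to stop probing every handle type with MFXVideoCORE_GetHandle() and instead derive the one handle type to request from the session's mfxIMPL value. Below is a minimal sketch of that selection logic; the helper name select_handle_type() is hypothetical, MFX_IMPL_VIA_MASK is the macro the patch itself defines, and the remaining names are standard Media SDK API (the VAAPI constants only exist in Linux builds of the headers).

    #include <mfxvideo.h>   /* Intel Media SDK; the include path may differ per build setup */

    #define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))  /* same mask the patch defines */

    /* Hypothetical helper mirroring the selection logic the patch repeats in
     * libavcodec/qsv.c, libavfilter/qsvvpp.c and libavutil/hwcontext_qsv.c. */
    static mfxHandleType select_handle_type(mfxSession session)
    {
        mfxIMPL impl = MFX_IMPL_AUTO;
        /* Legacy default; the pre-patch code effectively assumed D3D9/DXVA2 on Windows. */
        mfxHandleType handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;

        if (MFXQueryIMPL(session, &impl) != MFX_ERR_NONE)
            return handle_type;

        if (MFX_IMPL_VIA_MASK(impl) == MFX_IMPL_VIA_D3D11)
            handle_type = MFX_HANDLE_D3D11_DEVICE;
        else if (MFX_IMPL_VIA_MASK(impl) == MFX_IMPL_VIA_D3D9)
            handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
        else if (MFX_IMPL_VIA_MASK(impl) == MFX_IMPL_VIA_VAAPI)
            handle_type = MFX_HANDLE_VA_DISPLAY;  /* Linux path */

        return handle_type;
    }
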
diff --git a/contrib/ffmpeg/A13-qsv-dx11.patch b/contrib/ffmpeg/A13-qsv-dx11.patch
index 177f43bb7..8da246073 100644
--- a/contrib/ffmpeg/A13-qsv-dx11.patch
+++ b/contrib/ffmpeg/A13-qsv-dx11.patch
@@ -1,17 +1,90 @@
+diff --git a/fftools/ffmpeg_opt.c b/fftools/ffmpeg_opt.c
+index f5ca18aa64..75d9b5669b 100644
+--- a/fftools/ffmpeg_opt.c
++++ b/fftools/ffmpeg_opt.c
+@@ -506,7 +506,17 @@ static int opt_init_hw_device(void *optctx, const char *opt, const char *arg)
+ printf("\n");
+ exit_program(0);
+ } else {
+- return hw_device_init_from_string(arg, NULL);
++ HWDevice *dev;
++ int err;
++ if (!arg)
++ return AVERROR(ENOMEM);
++ err = hw_device_init_from_string(arg, &dev);
++ if (err < 0)
++ return err;
++ hw_device_ctx = av_buffer_ref(dev->device_ref);
++ if (!hw_device_ctx)
++ return AVERROR(ENOMEM);
++ return 0;
+ }
+ }
+
diff --git a/libavcodec/qsv.c b/libavcodec/qsv.c
-index 986d4f6022..35879834d8 100644
+index 986d4f6022..2f01976be7 100644
--- a/libavcodec/qsv.c
+++ b/libavcodec/qsv.c
-@@ -328,7 +328,7 @@ load_plugin_fail:
+@@ -36,6 +36,8 @@
+ #include "avcodec.h"
+ #include "qsv_internal.h"
+
++#define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
++
+ #if QSV_VERSION_ATLEAST(1, 12)
+ #include "mfx/mfxvp8.h"
+ #endif
+@@ -207,7 +209,7 @@ int ff_qsv_print_warning(void *log_ctx, mfxStatus err,
+ return ret;
+ }
+
+-static enum AVPixelFormat qsv_map_fourcc(uint32_t fourcc)
++enum AVPixelFormat ff_qsv_map_fourcc(uint32_t fourcc)
+ {
+ switch (fourcc) {
+ case MFX_FOURCC_NV12: return AV_PIX_FMT_NV12;
+@@ -245,6 +247,24 @@ int ff_qsv_find_surface_idx(QSVFramesContext *ctx, QSVFrame *frame)
+ return AVERROR_BUG;
+ }
+
++enum AVFieldOrder ff_qsv_map_picstruct(int mfx_pic_struct)
++{
++ enum AVFieldOrder field = AV_FIELD_UNKNOWN;
++ switch (mfx_pic_struct & 0xF) {
++ case MFX_PICSTRUCT_PROGRESSIVE:
++ field = AV_FIELD_PROGRESSIVE;
++ break;
++ case MFX_PICSTRUCT_FIELD_TFF:
++ field = AV_FIELD_TT;
++ break;
++ case MFX_PICSTRUCT_FIELD_BFF:
++ field = AV_FIELD_BB;
++ break;
++ }
++
++ return field;
++}
++
+ enum AVPictureType ff_qsv_map_pictype(int mfx_pic_type)
+ {
+ enum AVPictureType type;
+@@ -328,8 +348,13 @@ load_plugin_fail:
int ff_qsv_init_internal_session(AVCodecContext *avctx, mfxSession *session,
const char *load_plugins)
{
- mfxIMPL impl = MFX_IMPL_AUTO_ANY;
-+ mfxIMPL impl = MFX_IMPL_AUTO_ANY | MFX_IMPL_VIA_D3D11;
- mfxVersion ver = { { QSV_VERSION_MINOR, QSV_VERSION_MAJOR } };
+- mfxVersion ver = { { QSV_VERSION_MINOR, QSV_VERSION_MAJOR } };
++#ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
++ mfxIMPL impl = MFX_IMPL_AUTO_ANY;
++#else
++ mfxIMPL impl = MFX_IMPL_AUTO_ANY | MFX_IMPL_VIA_D3D11;
++#endif
++ mfxVersion ver = { { QSV_VERSION_MINOR, QSV_VERSION_MAJOR } };
++ mfxInitParam init_par = { MFX_IMPL_AUTO_ANY };
const char *desc;
-@@ -406,6 +406,7 @@ static AVBufferRef *qsv_create_mids(AVBufferRef *hw_frames_ref)
+ int ret;
+@@ -406,6 +431,7 @@ static AVBufferRef *qsv_create_mids(AVBufferRef *hw_frames_ref)
for (i = 0; i < nb_surfaces; i++) {
QSVMid *mid = &mids[i];
mid->handle = frames_hwctx->surfaces[i].Data.MemId;
@@ -19,125 +92,542 @@ index 986d4f6022..35879834d8 100644
mid->hw_frames_ref = hw_frames_ref1;
}
-@@ -615,7 +616,14 @@ static mfxStatus qsv_frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
+@@ -500,7 +526,7 @@ static mfxStatus qsv_frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
+ frames_hwctx = frames_ctx->hwctx;
+
+ frames_ctx->format = AV_PIX_FMT_QSV;
+- frames_ctx->sw_format = qsv_map_fourcc(i->FourCC);
++ frames_ctx->sw_format = ff_qsv_map_fourcc(i->FourCC);
+ frames_ctx->width = i->Width;
+ frames_ctx->height = i->Height;
+ frames_ctx->initial_pool_size = req->NumFrameSuggested;
+@@ -615,7 +641,13 @@ static mfxStatus qsv_frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
static mfxStatus qsv_frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
QSVMid *qsv_mid = (QSVMid*)mid;
- *hdl = qsv_mid->handle;
-+
+ if (qsv_mid->texture) {
-+ mfxHDLPair *pPair = (mfxHDLPair*)hdl;
-+ pPair->first = qsv_mid->texture;
-+ pPair->second = qsv_mid->handle;
++ mfxHDLPair *pair = (mfxHDLPair*)hdl;
++ pair->first = qsv_mid->texture;
++ pair->second = qsv_mid->handle;
+ } else {
+ *hdl = qsv_mid->handle;
+ }
return MFX_ERR_NONE;
}
-@@ -624,8 +632,8 @@ int ff_qsv_init_session_device(AVCodecContext *avctx, mfxSession *psession,
- {
- static const mfxHandleType handle_types[] = {
- MFX_HANDLE_VA_DISPLAY,
-- MFX_HANDLE_D3D9_DEVICE_MANAGER,
- MFX_HANDLE_D3D11_DEVICE,
-+ MFX_HANDLE_D3D9_DEVICE_MANAGER,
- };
+@@ -630,12 +662,12 @@ int ff_qsv_init_session_device(AVCodecContext *avctx, mfxSession *psession,
AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)device_ref->data;
AVQSVDeviceContext *device_hwctx = device_ctx->hwctx;
+ mfxSession parent_session = device_hwctx->session;
++ mfxHDL handle = NULL;
++ mfxHandleType handle_type = MFX_IMPL_AUTO;
+
+ mfxSession session;
+ mfxVersion ver;
+ mfxIMPL impl;
+- mfxHDL handle = NULL;
+- mfxHandleType handle_type;
+ mfxStatus err;
+
+ int i, ret;
+@@ -647,11 +679,26 @@ int ff_qsv_init_session_device(AVCodecContext *avctx, mfxSession *psession,
+ return ff_qsv_print_error(avctx, err,
+ "Error querying the session attributes");
+
++ if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(impl))
++ {
++ handle_type = MFX_HANDLE_D3D11_DEVICE;
++ }
++ else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(impl))
++ {
++ handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
++ }
++ else if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(impl))
++ {
++ handle_type = MFX_HANDLE_VA_DISPLAY;
++ }
++
+ for (i = 0; i < FF_ARRAY_ELEMS(handle_types); i++) {
+- err = MFXVideoCORE_GetHandle(parent_session, handle_types[i], &handle);
+- if (err == MFX_ERR_NONE) {
+- handle_type = handle_types[i];
+- break;
++ if (handle_types[i] == handle_type)
++ {
++ err = MFXVideoCORE_GetHandle(parent_session, handle_types[i], &handle);
++ if (err == MFX_ERR_NONE) {
++ break;
++ }
+ }
+ handle = NULL;
+ }
diff --git a/libavcodec/qsv_internal.h b/libavcodec/qsv_internal.h
-index b63a7d6a31..e8a766d15e 100644
+index b63a7d6a31..fe2cda4b22 100644
--- a/libavcodec/qsv_internal.h
+++ b/libavcodec/qsv_internal.h
-@@ -46,6 +46,8 @@ typedef struct QSVMid {
- AVBufferRef *hw_frames_ref;
- mfxHDL handle;
-
-+ void *texture;
-+
+@@ -49,6 +49,11 @@ typedef struct QSVMid {
AVFrame *locked_frame;
AVFrame *hw_frame;
mfxFrameSurface1 surf;
-diff --git a/libavutil/hwcontext_d3d11va.c b/libavutil/hwcontext_d3d11va.c
-index 6670c47579..096770b9ce 100644
---- a/libavutil/hwcontext_d3d11va.c
-+++ b/libavutil/hwcontext_d3d11va.c
-@@ -494,12 +494,12 @@ static void d3d11va_device_uninit(AVHWDeviceContext *hwdev)
++ /**
++ * ID3D11Texture2D texture in which the frame is located for D3D11VA device.
++ * Null in case of DXVA2 device.
++ */
++ void *texture;
+ } QSVMid;
+
+ typedef struct QSVFrame {
+@@ -92,9 +97,13 @@ int ff_qsv_codec_id_to_mfx(enum AVCodecID codec_id);
+ int ff_qsv_profile_to_mfx(enum AVCodecID codec_id, int profile);
+ int ff_qsv_level_to_mfx(enum AVCodecID codec_id, int level);
+
++enum AVPixelFormat ff_qsv_map_fourcc(uint32_t fourcc);
++
+ int ff_qsv_map_pixfmt(enum AVPixelFormat format, uint32_t *fourcc);
+ enum AVPictureType ff_qsv_map_pictype(int mfx_pic_type);
+
++enum AVFieldOrder ff_qsv_map_picstruct(int mfx_pic_struct);
++
+ int ff_qsv_init_internal_session(AVCodecContext *avctx, mfxSession *session,
+ const char *load_plugins);
+
+diff --git a/libavcodec/qsvdec.c b/libavcodec/qsvdec.c
+index 46aa2d6814..7e48c833ab 100644
+--- a/libavcodec/qsvdec.c
++++ b/libavcodec/qsvdec.c
+@@ -147,19 +147,21 @@ static int check_dec_param(AVCodecContext *avctx, QSVContext *q, mfxVideoParam *
+ return 1;
+ }
+
+-static int qsv_decode_init(AVCodecContext *avctx, QSVContext *q)
++static int qsv_decode_preinit(AVCodecContext *avctx, QSVContext *q, enum AVPixelFormat pix_fmt, mfxVideoParam *param)
+ {
+- const AVPixFmtDescriptor *desc;
+ mfxSession session = NULL;
+ int iopattern = 0;
+- mfxVideoParam param = { 0 };
+- int frame_width = avctx->coded_width;
+- int frame_height = avctx->coded_height;
+ int ret;
++ enum AVPixelFormat pix_fmts[3] = {
++ AV_PIX_FMT_QSV, /* opaque format in case of video memory output */
++ pix_fmt, /* system memory format obtained from bitstream parser */
++ AV_PIX_FMT_NONE };
+
+- desc = av_pix_fmt_desc_get(avctx->sw_pix_fmt);
+- if (!desc)
+- return AVERROR_BUG;
++ ret = ff_get_format(avctx, pix_fmts);
++ if (ret < 0) {
++ q->orig_pix_fmt = avctx->pix_fmt = AV_PIX_FMT_NONE;
++ return ret;
++ }
+
+ if (!q->async_fifo) {
+ q->async_fifo = av_fifo_alloc(q->async_depth * qsv_fifo_item_size());
+@@ -197,54 +199,72 @@ static int qsv_decode_init(AVCodecContext *avctx, QSVContext *q)
+ return ret;
}
- if (device_hwctx->video_device) {
-- ID3D11VideoDevice_Release(device_hwctx->video_device);
-+ //ID3D11VideoDevice_Release(device_hwctx->video_device);
- device_hwctx->video_device = NULL;
+- ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
+- if (ret < 0)
+- return ret;
++ param->IOPattern = q->iopattern;
++ param->AsyncDepth = q->async_depth;
++ param->ExtParam = q->ext_buffers;
++ param->NumExtParam = q->nb_ext_buffers;
+
+- param.mfx.CodecId = ret;
+- param.mfx.CodecProfile = ff_qsv_profile_to_mfx(avctx->codec_id, avctx->profile);
+- param.mfx.CodecLevel = ff_qsv_level_to_mfx(avctx->codec_id, avctx->level);
+-
+- param.mfx.FrameInfo.BitDepthLuma = desc->comp[0].depth;
+- param.mfx.FrameInfo.BitDepthChroma = desc->comp[0].depth;
+- param.mfx.FrameInfo.Shift = desc->comp[0].depth > 8;
+- param.mfx.FrameInfo.FourCC = q->fourcc;
+- param.mfx.FrameInfo.Width = frame_width;
+- param.mfx.FrameInfo.Height = frame_height;
+- param.mfx.FrameInfo.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
+-
+- switch (avctx->field_order) {
+- case AV_FIELD_PROGRESSIVE:
+- param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
+- break;
+- case AV_FIELD_TT:
+- param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_TFF;
+- break;
+- case AV_FIELD_BB:
+- param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_BFF;
+- break;
+- default:
+- param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_UNKNOWN;
+- break;
+- }
++ return 0;
++ }
+
+- param.IOPattern = q->iopattern;
+- param.AsyncDepth = q->async_depth;
+- param.ExtParam = q->ext_buffers;
+- param.NumExtParam = q->nb_ext_buffers;
++static int qsv_decode_init(AVCodecContext *avctx, QSVContext *q, mfxVideoParam *param)
++{
++ int ret;
+
+- if (!check_dec_param(avctx, q, &param)) {
+- //Just give a warning instead of an error since it is still decodable possibly.
+- av_log(avctx, AV_LOG_WARNING,
+- "Current input bitstream is not supported by QSV decoder.\n");
+- }
++ avctx->width = param->mfx.FrameInfo.CropW;
++ avctx->height = param->mfx.FrameInfo.CropH;
++ avctx->coded_width = param->mfx.FrameInfo.Width;
++ avctx->coded_height = param->mfx.FrameInfo.Height;
++ avctx->level = param->mfx.CodecLevel;
++ avctx->profile = param->mfx.CodecProfile;
++ avctx->field_order = ff_qsv_map_picstruct(param->mfx.FrameInfo.PicStruct);
++ avctx->pix_fmt = ff_qsv_map_fourcc(param->mfx.FrameInfo.FourCC);
+
+- ret = MFXVideoDECODE_Init(q->session, &param);
++ ret = MFXVideoDECODE_Init(q->session, param);
+ if (ret < 0)
+ return ff_qsv_print_error(avctx, ret,
+ "Error initializing the MFX video decoder");
+
+- q->frame_info = param.mfx.FrameInfo;
++ q->frame_info = param->mfx.FrameInfo;
++
++ return 0;
++}
++
++static int qsv_decode_header(AVCodecContext *avctx, QSVContext *q, AVPacket *avpkt, enum AVPixelFormat pix_fmt, mfxVideoParam *param)
++{
++ int ret;
++
++ mfxBitstream bs = { 0 };
++
++ if (avpkt->size) {
++ bs.Data = avpkt->data;
++ bs.DataLength = avpkt->size;
++ bs.MaxLength = bs.DataLength;
++ bs.TimeStamp = avpkt->pts;
++ if (avctx->field_order == AV_FIELD_PROGRESSIVE)
++ bs.DataFlag |= MFX_BITSTREAM_COMPLETE_FRAME;
++ } else
++ return AVERROR_INVALIDDATA;
++
++
++ if(!q->session) {
++ ret = qsv_decode_preinit(avctx, q, pix_fmt, param);
++ if (ret < 0)
++ return ret;
++ }
++
++ ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
++ if (ret < 0)
++ return ret;
++
++ param->mfx.CodecId = ret;
++ ret = MFXVideoDECODE_DecodeHeader(q->session, &bs, param);
++ if (MFX_ERR_MORE_DATA == ret) {
++ return AVERROR(EAGAIN);
++ }
++ if (ret < 0)
++ return ff_qsv_print_error(avctx, ret,
++ "Error decoding stream header");
+
+ return 0;
+ }
+@@ -527,7 +547,8 @@ int ff_qsv_process_data(AVCodecContext *avctx, QSVContext *q,
+ uint8_t *dummy_data;
+ int dummy_size;
+ int ret;
+- const AVPixFmtDescriptor *desc;
++ mfxVideoParam param = { 0 };
++ enum AVPixelFormat pix_fmt = AV_PIX_FMT_NV12;
+
+ if (!q->avctx_internal) {
+ q->avctx_internal = avcodec_alloc_context3(NULL);
+@@ -541,7 +562,6 @@ int ff_qsv_process_data(AVCodecContext *avctx, QSVContext *q,
+ return AVERROR(ENOMEM);
+
+ q->parser->flags |= PARSER_FLAG_COMPLETE_FRAMES;
+- q->orig_pix_fmt = AV_PIX_FMT_NONE;
}
- if (device_hwctx->video_context) {
-- ID3D11VideoContext_Release(device_hwctx->video_context);
-+ //ID3D11VideoContext_Release(device_hwctx->video_context);
- device_hwctx->video_context = NULL;
+ if (!pkt->size)
+@@ -554,15 +574,23 @@ int ff_qsv_process_data(AVCodecContext *avctx, QSVContext *q,
+ pkt->data, pkt->size, pkt->pts, pkt->dts,
+ pkt->pos);
+
+- avctx->field_order = q->parser->field_order;
+ /* TODO: flush delayed frames on reinit */
+- if (q->parser->format != q->orig_pix_fmt ||
+- FFALIGN(q->parser->coded_width, 16) != FFALIGN(avctx->coded_width, 16) ||
+- FFALIGN(q->parser->coded_height, 16) != FFALIGN(avctx->coded_height, 16)) {
+- enum AVPixelFormat pix_fmts[3] = { AV_PIX_FMT_QSV,
+- AV_PIX_FMT_NONE,
+- AV_PIX_FMT_NONE };
+- enum AVPixelFormat qsv_format;
++
++ // sw_pix_fmt and coded_width/height should be set for ff_get_format();
++ // assume sw_pix_fmt is NV12 and coded_width/height is 1280x720.
++ // The assumption may not be correct, but it will be updated after the header is decoded if not.
++ if (q->orig_pix_fmt != AV_PIX_FMT_NONE)
++ pix_fmt = q->orig_pix_fmt;
++ if (!avctx->coded_width)
++ avctx->coded_width = 1280;
++ if (!avctx->coded_height)
++ avctx->coded_height = 720;
++
++ ret = qsv_decode_header(avctx, q, pkt, pix_fmt, &param);
++
++ if (ret >= 0 && (q->orig_pix_fmt != ff_qsv_map_fourcc(param.mfx.FrameInfo.FourCC) ||
++ avctx->coded_width != param.mfx.FrameInfo.Width ||
++ avctx->coded_height != param.mfx.FrameInfo.Height)) {
+ AVPacket zero_pkt = {0};
+
+ if (q->buffered_count) {
+@@ -571,45 +599,24 @@ int ff_qsv_process_data(AVCodecContext *avctx, QSVContext *q,
+ q->buffered_count--;
+ return qsv_decode(avctx, q, frame, got_frame, &zero_pkt);
+ }
+-
+ q->reinit_flag = 0;
+
+- qsv_format = ff_qsv_map_pixfmt(q->parser->format, &q->fourcc);
+- if (qsv_format < 0) {
+- av_log(avctx, AV_LOG_ERROR,
+- "Decoding pixel format '%s' is not supported\n",
+- av_get_pix_fmt_name(q->parser->format));
+- ret = AVERROR(ENOSYS);
+- goto reinit_fail;
+- }
++ q->orig_pix_fmt = avctx->pix_fmt = pix_fmt = ff_qsv_map_fourcc(param.mfx.FrameInfo.FourCC);
+
+- q->orig_pix_fmt = q->parser->format;
+- avctx->pix_fmt = pix_fmts[1] = qsv_format;
+- avctx->width = q->parser->width;
+- avctx->height = q->parser->height;
+- avctx->coded_width = FFALIGN(q->parser->coded_width, 16);
+- avctx->coded_height = FFALIGN(q->parser->coded_height, 16);
+- avctx->level = q->avctx_internal->level;
+- avctx->profile = q->avctx_internal->profile;
++ avctx->coded_width = param.mfx.FrameInfo.Width;
++ avctx->coded_height = param.mfx.FrameInfo.Height;
+
+- ret = ff_get_format(avctx, pix_fmts);
++ ret = qsv_decode_preinit(avctx, q, pix_fmt, &param);
+ if (ret < 0)
+ goto reinit_fail;
++ q->initialized = 0;
++ }
+
+- avctx->pix_fmt = ret;
+-
+- desc = av_pix_fmt_desc_get(avctx->pix_fmt);
+- if (!desc)
+- goto reinit_fail;
+-
+- if (desc->comp[0].depth > 8) {
+- avctx->coded_width = FFALIGN(q->parser->coded_width, 32);
+- avctx->coded_height = FFALIGN(q->parser->coded_height, 32);
+- }
+-
+- ret = qsv_decode_init(avctx, q);
++ if (!q->initialized) {
++ ret = qsv_decode_init(avctx, q, &param);
+ if (ret < 0)
+ goto reinit_fail;
++ q->initialized = 1;
}
-@@ -510,6 +510,42 @@ static void d3d11va_device_uninit(AVHWDeviceContext *hwdev)
+ return qsv_decode(avctx, q, frame, got_frame, pkt);
+@@ -622,4 +629,5 @@ reinit_fail:
+ void ff_qsv_decode_flush(AVCodecContext *avctx, QSVContext *q)
+ {
+ q->orig_pix_fmt = AV_PIX_FMT_NONE;
++ q->initialized = 0;
+ }
+diff --git a/libavcodec/qsvdec.h b/libavcodec/qsvdec.h
+index 111536caba..4812fb2a6b 100644
+--- a/libavcodec/qsvdec.h
++++ b/libavcodec/qsvdec.h
+@@ -63,6 +63,8 @@ typedef struct QSVContext {
+ uint32_t fourcc;
+ mfxFrameInfo frame_info;
+
++ int initialized;
++
+ // options set by the caller
+ int async_depth;
+ int iopattern;
+diff --git a/libavcodec/qsvdec_h2645.c b/libavcodec/qsvdec_h2645.c
+index 9b49f5506e..eb1dc336a4 100644
+--- a/libavcodec/qsvdec_h2645.c
++++ b/libavcodec/qsvdec_h2645.c
+@@ -103,6 +103,7 @@ static av_cold int qsv_decode_init(AVCodecContext *avctx)
+ }
+ }
+
++ s->qsv.orig_pix_fmt = AV_PIX_FMT_NV12;
+ s->packet_fifo = av_fifo_alloc(sizeof(AVPacket));
+ if (!s->packet_fifo) {
+ ret = AVERROR(ENOMEM);
+diff --git a/libavcodec/qsvdec_other.c b/libavcodec/qsvdec_other.c
+index 03251d2c85..a6f1b88ca0 100644
+--- a/libavcodec/qsvdec_other.c
++++ b/libavcodec/qsvdec_other.c
+@@ -90,6 +90,7 @@ static av_cold int qsv_decode_init(AVCodecContext *avctx)
+ }
+ #endif
+
++ s->qsv.orig_pix_fmt = AV_PIX_FMT_NV12;
+ s->packet_fifo = av_fifo_alloc(sizeof(AVPacket));
+ if (!s->packet_fifo) {
+ ret = AVERROR(ENOMEM);
+diff --git a/libavfilter/qsvvpp.c b/libavfilter/qsvvpp.c
+index 8d5ff2eb65..61135541e5 100644
+--- a/libavfilter/qsvvpp.c
++++ b/libavfilter/qsvvpp.c
+@@ -32,10 +32,11 @@
+ #include "qsvvpp.h"
+ #include "video.h"
+
+-#define IS_VIDEO_MEMORY(mode) (mode & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | \
++#define IS_VIDEO_MEMORY(mode) (mode & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | \
+ MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
+-#define IS_OPAQUE_MEMORY(mode) (mode & MFX_MEMTYPE_OPAQUE_FRAME)
+-#define IS_SYSTEM_MEMORY(mode) (mode & MFX_MEMTYPE_SYSTEM_MEMORY)
++#define IS_OPAQUE_MEMORY(mode) (mode & MFX_MEMTYPE_OPAQUE_FRAME)
++#define IS_SYSTEM_MEMORY(mode) (mode & MFX_MEMTYPE_SYSTEM_MEMORY)
++#define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
+
+ typedef struct QSVFrame {
+ AVFrame *frame;
+@@ -405,12 +406,12 @@ static int init_vpp_session(AVFilterContext *avctx, QSVVPPContext *s)
+ AVFilterLink *outlink = avctx->outputs[0];
+ AVQSVFramesContext *in_frames_hwctx = NULL;
+ AVQSVFramesContext *out_frames_hwctx = NULL;
++ mfxHandleType handle_type = MFX_IMPL_AUTO;
+
+ AVBufferRef *device_ref;
+ AVHWDeviceContext *device_ctx;
+ AVQSVDeviceContext *device_hwctx;
+ mfxHDL handle;
+- mfxHandleType handle_type;
+ mfxVersion ver;
+ mfxIMPL impl;
+ int ret, i;
+@@ -497,15 +498,30 @@ static int init_vpp_session(AVFilterContext *avctx, QSVVPPContext *s)
+ return AVERROR_UNKNOWN;
+ }
+
++ if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(impl))
++ {
++ handle_type = MFX_HANDLE_D3D11_DEVICE;
++ }
++ else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(impl))
++ {
++ handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
++ }
++ else if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(impl))
++ {
++ handle_type = MFX_HANDLE_VA_DISPLAY;
++ }
++
+ for (i = 0; i < FF_ARRAY_ELEMS(handle_types); i++) {
+- ret = MFXVideoCORE_GetHandle(device_hwctx->session, handle_types[i], &handle);
+- if (ret == MFX_ERR_NONE) {
+- handle_type = handle_types[i];
+- break;
++ if (handle_types[i] == handle_type)
++ {
++ ret = MFXVideoCORE_GetHandle(device_hwctx->session, handle_types[i], &handle);
++ if (ret == MFX_ERR_NONE) {
++ break;
++ }
+ }
++ handle = NULL;
+ }
+-
+- if (ret != MFX_ERR_NONE) {
++ if (!handle) {
+ av_log(avctx, AV_LOG_ERROR, "Error getting the session handle\n");
+ return AVERROR_UNKNOWN;
+ }
+diff --git a/libavutil/hwcontext_d3d11va.c b/libavutil/hwcontext_d3d11va.c
+index 6670c47579..34e1ece437 100644
+--- a/libavutil/hwcontext_d3d11va.c
++++ b/libavutil/hwcontext_d3d11va.c
+@@ -510,15 +510,57 @@ static void d3d11va_device_uninit(AVHWDeviceContext *hwdev)
}
}
-+static int d3d11va_device_find_qsv_adapter(AVHWDeviceContext *ctx, UINT creationFlags)
++static int d3d11va_device_find_adapter_by_vendor_id(AVHWDeviceContext *ctx, UINT creationFlags, char *vendor)
+{
+ HRESULT hr;
-+ IDXGIAdapter *pAdapter = NULL;
++ IDXGIAdapter *adapter = NULL;
+ int adapter_id = 0;
-+ IDXGIFactory2 *pDXGIFactory;
-+ hr = mCreateDXGIFactory(&IID_IDXGIFactory2, (void **)&pDXGIFactory);
-+ while (IDXGIFactory2_EnumAdapters(pDXGIFactory, adapter_id++, &pAdapter) != DXGI_ERROR_NOT_FOUND)
++ IDXGIFactory2 *factory;
++ long int vendor_id = strtol(vendor, NULL, 0);
++ hr = mCreateDXGIFactory(&IID_IDXGIFactory2, (void **)&factory);
++ while (IDXGIFactory2_EnumAdapters(factory, adapter_id++, &adapter) != DXGI_ERROR_NOT_FOUND)
+ {
-+ ID3D11Device* g_pd3dDevice = NULL;
-+ DXGI_ADAPTER_DESC adapterDesc;
++ ID3D11Device* device = NULL;
++ DXGI_ADAPTER_DESC adapter_desc;
+
-+ hr = mD3D11CreateDevice(pAdapter, D3D_DRIVER_TYPE_UNKNOWN, NULL, creationFlags, NULL, 0, D3D11_SDK_VERSION, &g_pd3dDevice, NULL, NULL);
++ hr = mD3D11CreateDevice(adapter, D3D_DRIVER_TYPE_UNKNOWN, NULL, creationFlags, NULL, 0, D3D11_SDK_VERSION, &device, NULL, NULL);
+ if (FAILED(hr)) {
+ av_log(ctx, AV_LOG_ERROR, "D3D11CreateDevice returned error\n");
+ continue;
+ }
+
-+ hr = IDXGIAdapter2_GetDesc(pAdapter, &adapterDesc);
++ hr = IDXGIAdapter2_GetDesc(adapter, &adapter_desc);
+ if (FAILED(hr)) {
+ av_log(ctx, AV_LOG_ERROR, "IDXGIAdapter2_GetDesc returned error\n");
+ continue;
+ }
+
-+ if (pAdapter)
-+ IDXGIAdapter_Release(pAdapter);
++ if (device)
++ ID3D11Device_Release(device);
++
++ if (adapter)
++ IDXGIAdapter_Release(adapter);
+
-+ if (adapterDesc.VendorId == 0x8086) {
-+ IDXGIFactory2_Release(pDXGIFactory);
++ if (adapter_desc.VendorId == vendor_id) {
++ IDXGIFactory2_Release(factory);
+ return adapter_id - 1;
+ }
+ }
-+ IDXGIFactory2_Release(pDXGIFactory);
++ IDXGIFactory2_Release(factory);
+ return -1;
+}
+
static int d3d11va_device_create(AVHWDeviceContext *ctx, const char *device,
AVDictionary *opts, int flags)
{
-@@ -519,7 +555,9 @@ static int d3d11va_device_create(AVHWDeviceContext *ctx, const char *device,
+ AVD3D11VADeviceContext *device_hwctx = ctx->hwctx;
+
+ HRESULT hr;
++ AVDictionaryEntry *e;
IDXGIAdapter *pAdapter = NULL;
ID3D10Multithread *pMultithread;
UINT creationFlags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT;
+ int adapter = -1;
int is_debug = !!av_dict_get(opts, "debug", NULL, 0);
-+ int is_qsv = !!av_dict_get(opts, "d3d11va_qsv", NULL, 0);
int ret;
- // (On UWP we can't check this.)
-@@ -538,11 +576,22 @@ static int d3d11va_device_create(AVHWDeviceContext *ctx, const char *device,
+@@ -538,11 +580,23 @@ static int d3d11va_device_create(AVHWDeviceContext *ctx, const char *device,
return AVERROR_UNKNOWN;
}
-+ if (is_qsv) {
-+ adapter = d3d11va_device_find_qsv_adapter(ctx, creationFlags);
++ e = av_dict_get(opts, "vendor", NULL, 0);
++ if (e) {
++ adapter = d3d11va_device_find_adapter_by_vendor_id(ctx, creationFlags, e ? e->value : NULL);
+ if (adapter < 0) {
-+ av_log(ctx, AV_LOG_ERROR, "Failed to find DX11 adapter with QSV support\n");
++ av_log(ctx, AV_LOG_ERROR, "Failed to find d3d11va adapter by vendor id\n");
+ return AVERROR_UNKNOWN;
+ }
+ }
@@ -155,10 +645,14 @@ index 6670c47579..096770b9ce 100644
pAdapter = NULL;
IDXGIFactory2_Release(pDXGIFactory);
diff --git a/libavutil/hwcontext_qsv.c b/libavutil/hwcontext_qsv.c
-index 59e4ed9157..56f3ccc94a 100644
+index 59e4ed9157..270ef976d7 100644
--- a/libavutil/hwcontext_qsv.c
+++ b/libavutil/hwcontext_qsv.c
-@@ -30,6 +30,9 @@
+@@ -27,9 +27,13 @@
+ #include <pthread.h>
+ #endif
+
++#define COBJMACROS
#if CONFIG_VAAPI
#include "hwcontext_vaapi.h"
#endif
@@ -168,45 +662,118 @@ index 59e4ed9157..56f3ccc94a 100644
#if CONFIG_DXVA2
#include "hwcontext_dxva2.h"
#endif
-@@ -71,7 +74,7 @@ typedef struct QSVFramesContext {
- AVBufferRef *child_frames_ref;
- mfxFrameSurface1 *surfaces_internal;
- int nb_surfaces_used;
--
-+ void *texture;
- // used in the frame allocator for non-opaque surfaces
- mfxMemId *mem_ids;
- // used in the opaque alloc request for opaque surfaces
-@@ -89,6 +92,9 @@ static const struct {
+@@ -44,6 +48,8 @@
+ #include "pixdesc.h"
+ #include "time.h"
+
++#define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
++
+ typedef struct QSVDevicePriv {
+ AVBufferRef *child_device_ctx;
+ } QSVDevicePriv;
+@@ -89,6 +95,9 @@ static const struct {
#if CONFIG_VAAPI
{ MFX_HANDLE_VA_DISPLAY, AV_HWDEVICE_TYPE_VAAPI, AV_PIX_FMT_VAAPI },
#endif
+#if CONFIG_D3D11VA
-+ { MFX_HANDLE_D3D11_DEVICE, AV_HWDEVICE_TYPE_D3D11VA, AV_PIX_FMT_D3D11 },
++ { MFX_HANDLE_D3D11_DEVICE, AV_HWDEVICE_TYPE_D3D11VA, AV_PIX_FMT_D3D11 },
+#endif
#if CONFIG_DXVA2
{ MFX_HANDLE_D3D9_DEVICE_MANAGER, AV_HWDEVICE_TYPE_DXVA2, AV_PIX_FMT_DXVA2_VLD },
#endif
-@@ -229,6 +235,12 @@ static int qsv_init_child_ctx(AVHWFramesContext *ctx)
+@@ -119,25 +128,11 @@ static int qsv_device_init(AVHWDeviceContext *ctx)
+ {
+ AVQSVDeviceContext *hwctx = ctx->hwctx;
+ QSVDeviceContext *s = ctx->internal->priv;
++ mfxHandleType handle_type = MFX_IMPL_AUTO;
+
+ mfxStatus err;
+ int i;
+
+- for (i = 0; supported_handle_types[i].handle_type; i++) {
+- err = MFXVideoCORE_GetHandle(hwctx->session, supported_handle_types[i].handle_type,
+- &s->handle);
+- if (err == MFX_ERR_NONE) {
+- s->handle_type = supported_handle_types[i].handle_type;
+- s->child_device_type = supported_handle_types[i].device_type;
+- s->child_pix_fmt = supported_handle_types[i].pix_fmt;
+- break;
+- }
+- }
+- if (!s->handle) {
+- av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
+- "from the session\n");
+- }
+-
+ err = MFXQueryIMPL(hwctx->session, &s->impl);
+ if (err == MFX_ERR_NONE)
+ err = MFXQueryVersion(hwctx->session, &s->ver);
+@@ -146,6 +141,36 @@ static int qsv_device_init(AVHWDeviceContext *ctx)
+ return AVERROR_UNKNOWN;
+ }
+
++ if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(s->impl))
++ {
++ handle_type = MFX_HANDLE_D3D11_DEVICE;
++ }
++ else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(s->impl))
++ {
++ handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
++ }
++ else if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(s->impl))
++ {
++ handle_type = MFX_HANDLE_VA_DISPLAY;
++ }
++
++ for (i = 0; supported_handle_types[i].handle_type; i++) {
++ if (supported_handle_types[i].handle_type == handle_type) {
++ err = MFXVideoCORE_GetHandle(hwctx->session, supported_handle_types[i].handle_type,
++ &s->handle);
++ if (err == MFX_ERR_NONE) {
++ s->handle_type = supported_handle_types[i].handle_type;
++ s->child_device_type = supported_handle_types[i].device_type;
++ s->child_pix_fmt = supported_handle_types[i].pix_fmt;
++ break;
++ }
++ }
++ }
++ if (!s->handle) {
++ av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
++ "from the session\n");
++ }
++
+ return 0;
+ }
+
+@@ -222,13 +247,19 @@ static int qsv_init_child_ctx(AVHWFramesContext *ctx)
+ if (!child_device_ref)
+ return AVERROR(ENOMEM);
+ child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;
+-
+ #if CONFIG_VAAPI
+ if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
+ AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
child_device_hwctx->display = (VADisplay)device_priv->handle;
}
#endif
+#if CONFIG_D3D11VA
+ if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
+ AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
++ ID3D11Device_AddRef((ID3D11Device*)device_priv->handle);
+ child_device_hwctx->device = (ID3D11Device*)device_priv->handle;
+ }
+#endif
#if CONFIG_DXVA2
if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
-@@ -255,6 +267,15 @@ static int qsv_init_child_ctx(AVHWFramesContext *ctx)
+@@ -255,6 +286,16 @@ static int qsv_init_child_ctx(AVHWFramesContext *ctx)
child_frames_ctx->width = FFALIGN(ctx->width, 16);
child_frames_ctx->height = FFALIGN(ctx->height, 16);
+#if CONFIG_D3D11VA
+ if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
+ AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
++ child_frames_hwctx->MiscFlags |= D3D11_RESOURCE_MISC_SHARED;
+ if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
+ child_frames_hwctx->BindFlags = D3D11_BIND_RENDER_TARGET ;
+ else
@@ -216,7 +783,7 @@ index 59e4ed9157..56f3ccc94a 100644
#if CONFIG_DXVA2
if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
-@@ -279,6 +300,18 @@ static int qsv_init_child_ctx(AVHWFramesContext *ctx)
+@@ -279,6 +320,18 @@ static int qsv_init_child_ctx(AVHWFramesContext *ctx)
hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
}
#endif
@@ -225,7 +792,7 @@ index 59e4ed9157..56f3ccc94a 100644
+ AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
+ hwctx->texture = child_frames_hwctx->texture;
+ for (i = 0; i < ctx->initial_pool_size; i++)
-+ s->surfaces_internal[i].Data.MemId = (mfxMemId)i;
++ s->surfaces_internal[i].Data.MemId = (mfxMemId)(int64_t)i;
+ if (child_frames_hwctx->BindFlags == D3D11_BIND_DECODER)
+ hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
+ else
@@ -235,7 +802,98 @@ index 59e4ed9157..56f3ccc94a 100644
#if CONFIG_DXVA2
if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
-@@ -1074,7 +1107,7 @@ static void qsv_device_free(AVHWDeviceContext *ctx)
+@@ -421,7 +474,16 @@ static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
+
+ static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
+ {
+- *hdl = mid;
++ AVHWFramesContext *ctx = pthis;
++ AVQSVFramesContext *hwctx = ctx->hwctx;
++
++ if (hwctx->texture) {
++ mfxHDLPair *pair = (mfxHDLPair*)hdl;
++ pair->first = hwctx->texture;
++ pair->second = mid;
++ } else {
++ *hdl = mid;
++ }
+ return MFX_ERR_NONE;
+ }
+
+@@ -668,6 +730,11 @@ static int qsv_map_from(AVHWFramesContext *ctx,
+ child_data = (uint8_t*)(intptr_t)*(VASurfaceID*)surf->Data.MemId;
+ break;
+ #endif
++#if CONFIG_D3D11VA
++ case AV_HWDEVICE_TYPE_D3D11VA:
++ child_data = surf->Data.MemId;
++ break;
++#endif
+ #if CONFIG_DXVA2
+ case AV_HWDEVICE_TYPE_DXVA2:
+ child_data = surf->Data.MemId;
+@@ -972,6 +1039,27 @@ static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx,
+ }
+ break;
+ #endif
++#if CONFIG_D3D11VA
++ case AV_HWDEVICE_TYPE_D3D11VA:
++ {
++ AVD3D11VAFramesContext *src_hwctx = src_ctx->hwctx;
++ s->surfaces_internal = av_mallocz_array(src_ctx->initial_pool_size,
++ sizeof(*s->surfaces_internal));
++ if (!s->surfaces_internal)
++ return AVERROR(ENOMEM);
++ dst_hwctx->texture = src_hwctx->texture;
++ for (i = 0; i < src_ctx->initial_pool_size; i++) {
++ qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
++ s->surfaces_internal[i].Data.MemId = (mfxMemId)(int64_t)i;
++ }
++ dst_hwctx->nb_surfaces = src_ctx->initial_pool_size;
++ if (src_hwctx->BindFlags == D3D11_BIND_DECODER)
++ dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
++ else
++ dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
++ }
++ break;
++#endif
+ #if CONFIG_DXVA2
+ case AV_HWDEVICE_TYPE_DXVA2:
+ {
+@@ -1009,14 +1097,26 @@ static int qsv_map_to(AVHWFramesContext *dst_ctx,
+
+ for (i = 0; i < hwctx->nb_surfaces; i++) {
+ #if CONFIG_VAAPI
+- if (*(VASurfaceID*)hwctx->surfaces[i].Data.MemId ==
+- (VASurfaceID)(uintptr_t)src->data[3])
+- break;
++ if (AV_PIX_FMT_VAAPI == src->format) {
++ if (*(VASurfaceID*)hwctx->surfaces[i].Data.MemId ==
++ (VASurfaceID)(uintptr_t)src->data[3])
++ break;
++ }
++#endif
++#if CONFIG_D3D11VA
++ if (AV_PIX_FMT_D3D11 == src->format) {
++ if ((hwctx->texture == (ID3D11Texture2D*)(uintptr_t)src->data[0]) &&
++ ((ID3D11Texture2D*)hwctx->surfaces[i].Data.MemId ==
++ (ID3D11Texture2D*)(uintptr_t)src->data[1]))
++ break;
++ }
+ #endif
+ #if CONFIG_DXVA2
+- if ((IDirect3DSurface9*)hwctx->surfaces[i].Data.MemId ==
+- (IDirect3DSurface9*)(uintptr_t)src->data[3])
+- break;
++ if (AV_PIX_FMT_DXVA2_VLD == src->format) {
++ if ((IDirect3DSurface9*)hwctx->surfaces[i].Data.MemId ==
++ (IDirect3DSurface9*)(uintptr_t)src->data[3])
++ break;
++ }
+ #endif
+ }
+ if (i >= hwctx->nb_surfaces) {
+@@ -1074,7 +1174,7 @@ static void qsv_device_free(AVHWDeviceContext *ctx)
av_freep(&priv);
}
@@ -244,7 +902,7 @@ index 59e4ed9157..56f3ccc94a 100644
{
static const struct {
const char *name;
-@@ -1103,6 +1136,10 @@ static mfxIMPL choose_implementation(const char *device)
+@@ -1103,6 +1203,10 @@ static mfxIMPL choose_implementation(const char *device)
impl = strtol(device, NULL, 0);
}
@@ -255,7 +913,7 @@ index 59e4ed9157..56f3ccc94a 100644
return impl;
}
-@@ -1129,6 +1166,15 @@ static int qsv_device_derive_from_child(AVHWDeviceContext *ctx,
+@@ -1129,6 +1233,15 @@ static int qsv_device_derive_from_child(AVHWDeviceContext *ctx,
}
break;
#endif
@@ -271,22 +929,97 @@ index 59e4ed9157..56f3ccc94a 100644
#if CONFIG_DXVA2
case AV_HWDEVICE_TYPE_DXVA2:
{
-@@ -1231,9 +1277,12 @@ static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
- // possible, even when multiple devices and drivers are available.
- av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
- av_dict_set(&child_device_opts, "driver", "iHD", 0);
+@@ -1196,8 +1309,10 @@ fail:
+ static int qsv_device_derive(AVHWDeviceContext *ctx,
+ AVHWDeviceContext *child_device_ctx, int flags)
+ {
+- return qsv_device_derive_from_child(ctx, MFX_IMPL_HARDWARE_ANY,
+- child_device_ctx, flags);
++ mfxIMPL impl;
++ impl = choose_implementation("hw_any", child_device_ctx->type);
++ return qsv_device_derive_from_child(ctx, impl,
++ child_device_ctx, flags);
+ }
+
+ static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
+@@ -1219,25 +1334,63 @@ static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
+ ctx->user_opaque = priv;
+ ctx->free = qsv_device_free;
+
+- e = av_dict_get(opts, "child_device", NULL, 0);
+-
+- child_device_opts = NULL;
+- if (CONFIG_VAAPI) {
++ e = av_dict_get(opts, "child_device_type", NULL, 0);
++ if (e) {
++ child_device_type = av_hwdevice_find_type_by_name(e ? e->value : NULL);
++ if (child_device_type == AV_HWDEVICE_TYPE_NONE) {
++ av_log(ctx, AV_LOG_ERROR, "Unknown child device type "
++ "\"%s\".\n", e ? e->value : NULL);
++ return AVERROR(EINVAL);
++ }
++ } else if (CONFIG_VAAPI) {
+ child_device_type = AV_HWDEVICE_TYPE_VAAPI;
+- // libmfx does not actually implement VAAPI properly, rather it
+- // depends on the specific behaviour of a matching iHD driver when
+- // used on recent Intel hardware. Set options to the VAAPI device
+- // creation so that we should pick a usable setup by default if
+- // possible, even when multiple devices and drivers are available.
+- av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
+- av_dict_set(&child_device_opts, "driver", "iHD", 0);
- } else if (CONFIG_DXVA2)
-+ } else if (CONFIG_D3D11VA) {
-+ child_device_type = AV_HWDEVICE_TYPE_D3D11VA;
-+ av_dict_set(&child_device_opts, "d3d11va_qsv", "enabled", 0);
+ } else if (CONFIG_DXVA2) {
child_device_type = AV_HWDEVICE_TYPE_DXVA2;
- else {
++ } else if (CONFIG_D3D11VA) {
++ child_device_type = AV_HWDEVICE_TYPE_D3D11VA;
+ } else {
av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
return AVERROR(ENOSYS);
}
-@@ -1245,7 +1294,7 @@ static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
+
++ child_device_opts = NULL;
++ switch (child_device_type) {
++#if CONFIG_VAAPI
++ case AV_HWDEVICE_TYPE_VAAPI:
++ {
++ // libmfx does not actually implement VAAPI properly, rather it
++ // depends on the specific behaviour of a matching iHD driver when
++ // used on recent Intel hardware. Set options to the VAAPI device
++ // creation so that we should pick a usable setup by default if
++ // possible, even when multiple devices and drivers are available.
++ av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
++ av_dict_set(&child_device_opts, "driver", "iHD", 0);
++ }
++ break;
++#endif
++#if CONFIG_D3D11VA
++ case AV_HWDEVICE_TYPE_D3D11VA:
++ {
++ printf("AV_HWDEVICE_TYPE_D3D11VA has been selected\n");
++ }
++ break;
++#endif
++#if CONFIG_DXVA2
++ case AV_HWDEVICE_TYPE_DXVA2:
++ {
++ printf("AV_HWDEVICE_TYPE_DXVA2 has been selected\n");
++ }
++ break;
++#endif
++ default:
++ {
++ av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
++ return AVERROR(ENOSYS);
++ }
++ break;
++ }
++
++ e = av_dict_get(opts, "child_device", NULL, 0);
+ ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
+ e ? e->value : NULL, child_device_opts, 0);
+ if (ret < 0)
+@@ -1245,7 +1398,7 @@ static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
child_device = (AVHWDeviceContext*)priv->child_device_ctx->data;
@@ -296,14 +1029,35 @@ index 59e4ed9157..56f3ccc94a 100644
return qsv_device_derive_from_child(ctx, impl, child_device, 0);
}
diff --git a/libavutil/hwcontext_qsv.h b/libavutil/hwcontext_qsv.h
-index b98d611cfc..f5a9691949 100644
+index b98d611cfc..d21d58eb61 100644
--- a/libavutil/hwcontext_qsv.h
+++ b/libavutil/hwcontext_qsv.h
-@@ -42,6 +42,7 @@ typedef struct AVQSVDeviceContext {
+@@ -34,6 +34,15 @@
+ */
+ typedef struct AVQSVDeviceContext {
+ mfxSession session;
++ /**
++ * Need to store actual handle type that session uses
++ * MFXVideoCORE_GetHandle() function returns mfxHandleType
++ * always equal to MFX_HANDLE_D3D9_DEVICE_MANAGER
++ * even when MFX_HANDLE_D3D11_DEVICE was set as handle before by
++ * MFXVideoCORE_SetHandle() to mfx session.
++ * Fixed already but will be available only with latest driver.
++ */
++ //mfxHandleType handle_type;
+ } AVQSVDeviceContext;
+
+ /**
+@@ -42,11 +51,11 @@ typedef struct AVQSVDeviceContext {
typedef struct AVQSVFramesContext {
mfxFrameSurface1 *surfaces;
int nb_surfaces;
-+ void *texture;
-
+-
/**
* A combination of MFX_MEMTYPE_* describing the frame pool.
+ */
+ int frame_type;
++ void *texture;
+ } AVQSVFramesContext;
+
+ #endif /* AVUTIL_HWCONTEXT_QSV_H */
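
With the full patch applied, the intended way for API users to reach the D3D11 path is the new "child_device_type" option read by the patched qsv_device_create(). A hedged usage sketch follows; the function name and error handling are illustrative, while the option name and the "d3d11va" value come from the patch (av_hwdevice_find_type_by_name()).

    #include <libavutil/hwcontext.h>
    #include <libavutil/dict.h>

    /* Illustrative caller: create an AV_HWDEVICE_TYPE_QSV device whose child
     * device is D3D11VA instead of the DXVA2 default on Windows. */
    static int create_qsv_over_d3d11(AVBufferRef **hw_device_ref)
    {
        AVDictionary *opts = NULL;
        int ret;

        /* "child_device_type" is parsed by the patched qsv_device_create();
         * "d3d11va" must match av_hwdevice_find_type_by_name(). */
        av_dict_set(&opts, "child_device_type", "d3d11va", 0);

        ret = av_hwdevice_ctx_create(hw_device_ref, AV_HWDEVICE_TYPE_QSV,
                                     NULL, opts, 0);
        av_dict_free(&opts);
        return ret;
    }

On the ffmpeg command line the same option should be reachable through the -init_hw_device option string parsed by hw_device_init_from_string(), though the exact CLI syntax is not shown in this patch.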