author     Bradley Sepos <[email protected]>    2020-06-18 01:51:35 -0400
committer  Bradley Sepos <[email protected]>    2020-08-22 17:20:34 -0400
commit     634b44ea56e7f942a5a4eb1c50a99a2cb14b2699 (patch)
tree       d74e49a8eba7af5a9f592b8c2d4bc8ba76485f19
parent     b6e58390657244f467e9664aa6044dadbe91a65e (diff)
contrib: Update to FFmpeg 4.3.
-rw-r--r--  contrib/ffmpeg/A00-amf-h264-loop.patch                        |   23
-rw-r--r--  contrib/ffmpeg/A01-riff-tag-h265-in-avi.patch                 |   26
-rw-r--r--  contrib/ffmpeg/A04-videotoolbox.patch                         |   27
-rwxr-xr-x  contrib/ffmpeg/A12-FFmpeg-devel-Vulkan-support.patch          |   78
-rw-r--r--  contrib/ffmpeg/A12-videotoolbox-smpte2084-hlg-transfer.patch  |   69
-rw-r--r--  contrib/ffmpeg/A13-qsv-dx11.patch                             | 1063
-rw-r--r--  contrib/ffmpeg/A19-matroskaenc-aac-extradata-updated.patch    |   24
-rw-r--r--  contrib/ffmpeg/A19-matroskaenc-aac-extradata.patch            |   36
-rw-r--r--  contrib/ffmpeg/module.defs                                    |    8
9 files changed, 28 insertions(+), 1326 deletions(-)
diff --git a/contrib/ffmpeg/A00-amf-h264-loop.patch b/contrib/ffmpeg/A00-amf-h264-loop.patch
deleted file mode 100644
index 5617dbc65..000000000
--- a/contrib/ffmpeg/A00-amf-h264-loop.patch
+++ /dev/null
@@ -1,23 +0,0 @@
-From 27da30adddc6f9c5fcbc0d3a0b791db061d179cf Mon Sep 17 00:00:00 2001
-From: OvchinnikovDmitrii <[email protected]>
-Date: Fri, 11 Oct 2019 18:37:13 +0300
-Subject: [PATCH] libavcodec/amfenc_h264.c: Changed loop filter flag default
- value.
-
-The value has been changed to match the recommended(by AMF Encoder team)
----
- libavcodec/amfenc_h264.c | 1 +
- 1 file changed, 1 insertion(+)
-
-diff --git a/libavcodec/amfenc_h264.c b/libavcodec/amfenc_h264.c
-index 2c082e93bd9..7f2817f1157 100644
---- a/libavcodec/amfenc_h264.c
-+++ b/libavcodec/amfenc_h264.c
-@@ -366,6 +366,7 @@ static const AVCodecDefault defaults[] = {
- { "b", "2M" },
- { "g", "250" },
- { "slices", "1" },
-+ { "flags", "+loop"},
- { NULL },
- };
-
diff --git a/contrib/ffmpeg/A01-riff-tag-h265-in-avi.patch b/contrib/ffmpeg/A01-riff-tag-h265-in-avi.patch
deleted file mode 100644
index 17b6cde2c..000000000
--- a/contrib/ffmpeg/A01-riff-tag-h265-in-avi.patch
+++ /dev/null
@@ -1,26 +0,0 @@
-From 6c79abcfaa30675943081ef62caa984f9185a3b3 Mon Sep 17 00:00:00 2001
-From: Tim Walker <[email protected]>
-Date: Wed, 5 Feb 2020 22:34:34 +0100
-Subject: [PATCH] avformat/avidec: add support for recognizing H265 fourcc when demuxing
-
-See commit 2e31774b409d77f046f166fb3ff630a9ef91def7
-
----
- libavformat/riff.c | 1 +
- 1 file changed, 1 insertion(+)
-
-diff --git a/libavformat/riff.c b/libavformat/riff.c
-index c73f6e9db0..32914f9ab0 100644
---- a/libavformat/riff.c
-+++ b/libavformat/riff.c
-@@ -496,6 +496,7 @@ const AVCodecTag ff_codec_bmp_tags[] = {
-
- const AVCodecTag ff_codec_bmp_tags_unofficial[] = {
- { AV_CODEC_ID_HEVC, MKTAG('H', 'E', 'V', 'C') },
-+ { AV_CODEC_ID_HEVC, MKTAG('H', '2', '6', '5') },
- { AV_CODEC_ID_NONE, 0 }
- };
-
---
-2.20.1 (Apple Git-117)
-
diff --git a/contrib/ffmpeg/A04-videotoolbox.patch b/contrib/ffmpeg/A04-videotoolbox.patch
deleted file mode 100644
index 387af0d66..000000000
--- a/contrib/ffmpeg/A04-videotoolbox.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-diff --git a/libavcodec/videotoolboxenc.c b/libavcodec/videotoolboxenc.c
-index 7796a68..e8b6245 100644
---- a/libavcodec/videotoolboxenc.c
-+++ b/libavcodec/videotoolboxenc.c
-@@ -866,6 +866,14 @@ static int get_cv_color_primaries(AVCodecContext *avctx,
- *primaries = NULL;
- break;
-
-+ case AVCOL_PRI_BT470BG:
-+ *primaries = kCVImageBufferColorPrimaries_EBU_3213;
-+ break;
-+
-+ case AVCOL_PRI_SMPTE170M:
-+ *primaries = kCVImageBufferColorPrimaries_SMPTE_C;
-+ break;
-+
- case AVCOL_PRI_BT709:
- *primaries = kCVImageBufferColorPrimaries_ITU_R_709_2;
- break;
-@@ -1302,6 +1310,7 @@ static av_cold int vtenc_init(AVCodecContext *avctx)
- vtctx->get_param_set_func = compat_keys.CMVideoFormatDescriptionGetHEVCParameterSetAtIndex;
- if (!vtctx->get_param_set_func) return AVERROR(EINVAL);
- if (!get_vt_hevc_profile_level(avctx, &profile_level)) return AVERROR(EINVAL);
-+ vtctx->has_b_frames = avctx->max_b_frames > 0;
- }
-
- vtctx->session = NULL;
diff --git a/contrib/ffmpeg/A12-FFmpeg-devel-Vulkan-support.patch b/contrib/ffmpeg/A12-FFmpeg-devel-Vulkan-support.patch
deleted file mode 100755
index 64c4b1132..000000000
--- a/contrib/ffmpeg/A12-FFmpeg-devel-Vulkan-support.patch
+++ /dev/null
@@ -1,78 +0,0 @@
-From patchwork Mon Sep 17 00:00:00 2001
-Content-Type: text/plain; charset="utf-8"
-MIME-Version: 1.0
-Content-Transfer-Encoding: 7bit
-Subject: [FFmpeg-devel] amfenc: Add support Vulkan encoder
-From: OvchinnikovDmitrii <[email protected]>
-Date: Wed, 5 Jun 2019 02:43:10 +0300
-Subject: [PATCH] patch.
-
-
----
- libavcodec/amfenc.c | 22 ++++++++++++++++++++--
- libavcodec/amfenc.h | 3 ++-
- 2 files changed, 22 insertions(+), 3 deletions(-)
-
-diff --git a/libavcodec/amfenc.c b/libavcodec/amfenc.c
-index 3be9ff9..04d21f2 100644
---- a/libavcodec/amfenc.c
-+++ b/libavcodec/amfenc.c
-@@ -234,6 +234,7 @@ static int amf_init_context(AVCodecContext *avctx)
- ctx->trace->pVtbl->SetWriterLevel(ctx->trace, FFMPEG_AMF_WRITER_ID, AMF_TRACE_TRACE);
-
- res = ctx->factory->pVtbl->CreateContext(ctx->factory, &ctx->context);
-+ ctx->context1 = NULL;
- AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "CreateContext() failed with error %d\n", res);
-
- // If a device was passed to the encoder, try to initialise from that.
-@@ -311,8 +312,19 @@ static int amf_init_context(AVCodecContext *avctx)
- if (res == AMF_OK) {
- av_log(avctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via D3D9.\n");
- } else {
-- av_log(avctx, AV_LOG_ERROR, "AMF initialisation failed via D3D9: error %d.\n", res);
-- return AVERROR(ENOSYS);
-+ AMFGuid guid = IID_AMFContext1();
-+ res = ctx->context->pVtbl->QueryInterface(ctx->context, &guid, (void**)&ctx->context1);
-+ AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "CreateContext1() failed with error %d\n", res);
-+
-+ res = ctx->context1->pVtbl->InitVulkan(ctx->context1, NULL);
-+ if (res != AMF_OK) {
-+ if (res == AMF_NOT_SUPPORTED)
-+ av_log(avctx, AV_LOG_ERROR, "AMF via Vulkan is not supported on the given device.\n");
-+ else
-+ av_log(avctx, AV_LOG_ERROR, "AMF failed to initialise on the given Vulkan device: %d.\n", res);
-+ return AVERROR(ENOSYS);
-+ }
-+ av_log(avctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via Vulkan.\n");
- }
- }
- }
-@@ -373,6 +385,12 @@ int av_cold ff_amf_encode_close(AVCodecContext *avctx)
- ctx->context->pVtbl->Release(ctx->context);
- ctx->context = NULL;
- }
-+
-+ if (ctx->context1) {
-+ ctx->context1->pVtbl->Terminate(ctx->context1);
-+ ctx->context1->pVtbl->Release(ctx->context1);
-+ ctx->context1 = NULL;
-+ }
- av_buffer_unref(&ctx->hw_device_ctx);
- av_buffer_unref(&ctx->hw_frames_ctx);
-
-diff --git a/libavcodec/amfenc.h b/libavcodec/amfenc.h
-index b136184..ac2ed43 100644
---- a/libavcodec/amfenc.h
-+++ b/libavcodec/amfenc.h
-@@ -53,7 +53,8 @@ typedef struct AmfContext {
-
- amf_uint64 version; ///< version of AMF runtime
- AmfTraceWriter tracer; ///< AMF writer registered with AMF
-- AMFContext *context; ///< AMF context
-+ AMFContext *context; ///< AMF context
-+ AMFContext1 *context1;
- //encoder
- AMFComponent *encoder; ///< AMF encoder object
- amf_bool eof; ///< flag indicating EOF happened
---
-2.18.0.windows.1
diff --git a/contrib/ffmpeg/A12-videotoolbox-smpte2084-hlg-transfer.patch b/contrib/ffmpeg/A12-videotoolbox-smpte2084-hlg-transfer.patch
deleted file mode 100644
index 17b329e8c..000000000
--- a/contrib/ffmpeg/A12-videotoolbox-smpte2084-hlg-transfer.patch
+++ /dev/null
@@ -1,69 +0,0 @@
-From 391b67fcb58f95b5cc65b75070fd9a3dce4f8ac6 Mon Sep 17 00:00:00 2001
-From: Limin Wang <[email protected]>
-Date: Wed, 26 Jun 2019 18:57:59 +0800
-Subject: [PATCH] lavc/videotoolboxenc: add hdr10, linear, hlg color transfer
- function for videotoolboxenc
-
-Below is the testing ffmpeg command for the setting:
-./ffmpeg -i input.ts -c:v hevc_videotoolbox -color_primaries bt2020 -colorspace bt2020_ncl -color_trc smpte2084 smpte2048.ts
-./ffmpeg -i input.ts -c:v hevc_videotoolbox -color_primaries bt2020 -colorspace bt2020_ncl -color_trc linear linear.ts
-./ffmpeg -i input.ts -c:v hevc_videotoolbox -color_primaries bt2020 -colorspace bt2020_ncl -color_trc arib-std-b67 hlg.ts
-
-Signed-off-by: Limin Wang <[email protected]>
-Signed-off-by: Rick Kern <[email protected]>
----
- configure | 6 ++++++
- libavcodec/videotoolboxenc.c | 16 ++++++++++++++++
- 2 files changed, 22 insertions(+)
-
-diff --git a/configure b/configure
-index 34c2adb4a4f..3fb8f3521d2 100755
---- a/configure
-+++ b/configure
-@@ -2268,6 +2268,9 @@ TOOLCHAIN_FEATURES="
- TYPES_LIST="
- kCMVideoCodecType_HEVC
- kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange
-+ kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ
-+ kCVImageBufferTransferFunction_ITU_R_2100_HLG
-+ kCVImageBufferTransferFunction_Linear
- socklen_t
- struct_addrinfo
- struct_group_source_req
-@@ -6069,6 +6072,9 @@ enabled videotoolbox && {
- check_lib coreservices CoreServices/CoreServices.h UTGetOSTypeFromString "-framework CoreServices"
- check_func_headers CoreMedia/CMFormatDescription.h kCMVideoCodecType_HEVC "-framework CoreMedia"
- check_func_headers CoreVideo/CVPixelBuffer.h kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange "-framework CoreVideo"
-+ check_func_headers CoreVideo/CVImageBuffer.h kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ "-framework CoreVideo"
-+ check_func_headers CoreVideo/CVImageBuffer.h kCVImageBufferTransferFunction_ITU_R_2100_HLG "-framework CoreVideo"
-+ check_func_headers CoreVideo/CVImageBuffer.h kCVImageBufferTransferFunction_Linear "-framework CoreVideo"
- }
-
- check_struct "sys/time.h sys/resource.h" "struct rusage" ru_maxrss
-diff --git a/libavcodec/videotoolboxenc.c b/libavcodec/videotoolboxenc.c
-index ff33c279c3a..ece9d6ea8fb 100644
---- a/libavcodec/videotoolboxenc.c
-+++ b/libavcodec/videotoolboxenc.c
-@@ -915,6 +915,22 @@ static int get_cv_transfer_function(AVCodecContext *avctx,
- *transfer_fnc = kCVImageBufferTransferFunction_SMPTE_240M_1995;
- break;
-
-+#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_SMPTE_ST_2084_PQ
-+ case AVCOL_TRC_SMPTE2084:
-+ *transfer_fnc = kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ;
-+ break;
-+#endif
-+#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_LINEAR
-+ case AVCOL_TRC_LINEAR:
-+ *transfer_fnc = kCVImageBufferTransferFunction_Linear;
-+ break;
-+#endif
-+#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2100_HLG
-+ case AVCOL_TRC_ARIB_STD_B67:
-+ *transfer_fnc = kCVImageBufferTransferFunction_ITU_R_2100_HLG;
-+ break;
-+#endif
-+
- case AVCOL_TRC_GAMMA22:
- gamma = 2.2;
- *transfer_fnc = kCVImageBufferTransferFunction_UseGamma;
diff --git a/contrib/ffmpeg/A13-qsv-dx11.patch b/contrib/ffmpeg/A13-qsv-dx11.patch
deleted file mode 100644
index 200faa48b..000000000
--- a/contrib/ffmpeg/A13-qsv-dx11.patch
+++ /dev/null
@@ -1,1063 +0,0 @@
-diff --git a/fftools/ffmpeg_opt.c b/fftools/ffmpeg_opt.c
-index f5ca18aa64..75d9b5669b 100644
---- a/fftools/ffmpeg_opt.c
-+++ b/fftools/ffmpeg_opt.c
-@@ -506,7 +506,17 @@ static int opt_init_hw_device(void *optctx, const char *opt, const char *arg)
- printf("\n");
- exit_program(0);
- } else {
-- return hw_device_init_from_string(arg, NULL);
-+ HWDevice *dev;
-+ int err;
-+ if (!arg)
-+ return AVERROR(ENOMEM);
-+ err = hw_device_init_from_string(arg, &dev);
-+ if (err < 0)
-+ return err;
-+ hw_device_ctx = av_buffer_ref(dev->device_ref);
-+ if (!hw_device_ctx)
-+ return AVERROR(ENOMEM);
-+ return 0;
- }
- }
-
-diff --git a/libavcodec/qsv.c b/libavcodec/qsv.c
-index 986d4f6022..2f01976be7 100644
---- a/libavcodec/qsv.c
-+++ b/libavcodec/qsv.c
-@@ -36,6 +36,8 @@
- #include "avcodec.h"
- #include "qsv_internal.h"
-
-+#define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
-+
- #if QSV_VERSION_ATLEAST(1, 12)
- #include "mfx/mfxvp8.h"
- #endif
-@@ -207,7 +209,7 @@ int ff_qsv_print_warning(void *log_ctx, mfxStatus err,
- return ret;
- }
-
--static enum AVPixelFormat qsv_map_fourcc(uint32_t fourcc)
-+enum AVPixelFormat ff_qsv_map_fourcc(uint32_t fourcc)
- {
- switch (fourcc) {
- case MFX_FOURCC_NV12: return AV_PIX_FMT_NV12;
-@@ -245,6 +247,24 @@ int ff_qsv_find_surface_idx(QSVFramesContext *ctx, QSVFrame *frame)
- return AVERROR_BUG;
- }
-
-+enum AVFieldOrder ff_qsv_map_picstruct(int mfx_pic_struct)
-+{
-+ enum AVFieldOrder field = AV_FIELD_UNKNOWN;
-+ switch (mfx_pic_struct & 0xF) {
-+ case MFX_PICSTRUCT_PROGRESSIVE:
-+ field = AV_FIELD_PROGRESSIVE;
-+ break;
-+ case MFX_PICSTRUCT_FIELD_TFF:
-+ field = AV_FIELD_TT;
-+ break;
-+ case MFX_PICSTRUCT_FIELD_BFF:
-+ field = AV_FIELD_BB;
-+ break;
-+ }
-+
-+ return field;
-+}
-+
- enum AVPictureType ff_qsv_map_pictype(int mfx_pic_type)
- {
- enum AVPictureType type;
-@@ -328,8 +348,13 @@ load_plugin_fail:
- int ff_qsv_init_internal_session(AVCodecContext *avctx, mfxSession *session,
- const char *load_plugins)
- {
-- mfxIMPL impl = MFX_IMPL_AUTO_ANY;
-- mfxVersion ver = { { QSV_VERSION_MINOR, QSV_VERSION_MAJOR } };
-+#ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
-+ mfxIMPL impl = MFX_IMPL_AUTO_ANY;
-+#else
-+ mfxIMPL impl = MFX_IMPL_AUTO_ANY | MFX_IMPL_VIA_D3D11;
-+#endif
-+ mfxVersion ver = { { QSV_VERSION_MINOR, QSV_VERSION_MAJOR } };
-+ mfxInitParam init_par = { MFX_IMPL_AUTO_ANY };
-
- const char *desc;
- int ret;
-@@ -406,6 +431,7 @@ static AVBufferRef *qsv_create_mids(AVBufferRef *hw_frames_ref)
- for (i = 0; i < nb_surfaces; i++) {
- QSVMid *mid = &mids[i];
- mid->handle = frames_hwctx->surfaces[i].Data.MemId;
-+ mid->texture = frames_hwctx->texture;
- mid->hw_frames_ref = hw_frames_ref1;
- }
-
-@@ -500,7 +526,7 @@ static mfxStatus qsv_frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
- frames_hwctx = frames_ctx->hwctx;
-
- frames_ctx->format = AV_PIX_FMT_QSV;
-- frames_ctx->sw_format = qsv_map_fourcc(i->FourCC);
-+ frames_ctx->sw_format = ff_qsv_map_fourcc(i->FourCC);
- frames_ctx->width = i->Width;
- frames_ctx->height = i->Height;
- frames_ctx->initial_pool_size = req->NumFrameSuggested;
-@@ -615,7 +641,13 @@ static mfxStatus qsv_frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
- static mfxStatus qsv_frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
- {
- QSVMid *qsv_mid = (QSVMid*)mid;
-- *hdl = qsv_mid->handle;
-+ if (qsv_mid->texture) {
-+ mfxHDLPair *pair = (mfxHDLPair*)hdl;
-+ pair->first = qsv_mid->texture;
-+ pair->second = qsv_mid->handle;
-+ } else {
-+ *hdl = qsv_mid->handle;
-+ }
- return MFX_ERR_NONE;
- }
-
-@@ -630,12 +662,12 @@ int ff_qsv_init_session_device(AVCodecContext *avctx, mfxSession *psession,
- AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)device_ref->data;
- AVQSVDeviceContext *device_hwctx = device_ctx->hwctx;
- mfxSession parent_session = device_hwctx->session;
-+ mfxHDL handle = NULL;
-+ mfxHandleType handle_type = MFX_IMPL_AUTO;
-
- mfxSession session;
- mfxVersion ver;
- mfxIMPL impl;
-- mfxHDL handle = NULL;
-- mfxHandleType handle_type;
- mfxStatus err;
-
- int i, ret;
-@@ -647,11 +679,26 @@ int ff_qsv_init_session_device(AVCodecContext *avctx, mfxSession *psession,
- return ff_qsv_print_error(avctx, err,
- "Error querying the session attributes");
-
-+ if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(impl))
-+ {
-+ handle_type = MFX_HANDLE_D3D11_DEVICE;
-+ }
-+ else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(impl))
-+ {
-+ handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
-+ }
-+ else if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(impl))
-+ {
-+ handle_type = MFX_HANDLE_VA_DISPLAY;
-+ }
-+
- for (i = 0; i < FF_ARRAY_ELEMS(handle_types); i++) {
-- err = MFXVideoCORE_GetHandle(parent_session, handle_types[i], &handle);
-- if (err == MFX_ERR_NONE) {
-- handle_type = handle_types[i];
-- break;
-+ if (handle_types[i] == handle_type)
-+ {
-+ err = MFXVideoCORE_GetHandle(parent_session, handle_types[i], &handle);
-+ if (err == MFX_ERR_NONE) {
-+ break;
-+ }
- }
- handle = NULL;
- }
-diff --git a/libavcodec/qsv_internal.h b/libavcodec/qsv_internal.h
-index b63a7d6a31..fe2cda4b22 100644
---- a/libavcodec/qsv_internal.h
-+++ b/libavcodec/qsv_internal.h
-@@ -49,6 +49,11 @@ typedef struct QSVMid {
- AVFrame *locked_frame;
- AVFrame *hw_frame;
- mfxFrameSurface1 surf;
-+ /**
-+ * ID3D11Texture2D texture in which the frame is located for D3D11VA device.
-+ * Null in case of DXVA2 device.
-+ */
-+ void *texture;
- } QSVMid;
-
- typedef struct QSVFrame {
-@@ -92,9 +97,13 @@ int ff_qsv_codec_id_to_mfx(enum AVCodecID codec_id);
- int ff_qsv_profile_to_mfx(enum AVCodecID codec_id, int profile);
- int ff_qsv_level_to_mfx(enum AVCodecID codec_id, int level);
-
-+enum AVPixelFormat ff_qsv_map_fourcc(uint32_t fourcc);
-+
- int ff_qsv_map_pixfmt(enum AVPixelFormat format, uint32_t *fourcc);
- enum AVPictureType ff_qsv_map_pictype(int mfx_pic_type);
-
-+enum AVFieldOrder ff_qsv_map_picstruct(int mfx_pic_struct);
-+
- int ff_qsv_init_internal_session(AVCodecContext *avctx, mfxSession *session,
- const char *load_plugins);
-
-diff --git a/libavcodec/qsvdec.c b/libavcodec/qsvdec.c
-index 46aa2d6814..7e48c833ab 100644
---- a/libavcodec/qsvdec.c
-+++ b/libavcodec/qsvdec.c
-@@ -147,19 +147,21 @@ static int check_dec_param(AVCodecContext *avctx, QSVContext *q, mfxVideoParam *
- return 1;
- }
-
--static int qsv_decode_init(AVCodecContext *avctx, QSVContext *q)
-+static int qsv_decode_preinit(AVCodecContext *avctx, QSVContext *q, enum AVPixelFormat pix_fmt, mfxVideoParam *param)
- {
-- const AVPixFmtDescriptor *desc;
- mfxSession session = NULL;
- int iopattern = 0;
-- mfxVideoParam param = { 0 };
-- int frame_width = avctx->coded_width;
-- int frame_height = avctx->coded_height;
- int ret;
-+ enum AVPixelFormat pix_fmts[3] = {
-+ AV_PIX_FMT_QSV, /* opaque format in case of video memory output */
-+ pix_fmt, /* system memory format obtained from bitstream parser */
-+ AV_PIX_FMT_NONE };
-
-- desc = av_pix_fmt_desc_get(avctx->sw_pix_fmt);
-- if (!desc)
-- return AVERROR_BUG;
-+ ret = ff_get_format(avctx, pix_fmts);
-+ if (ret < 0) {
-+ q->orig_pix_fmt = avctx->pix_fmt = AV_PIX_FMT_NONE;
-+ return ret;
-+ }
-
- if (!q->async_fifo) {
- q->async_fifo = av_fifo_alloc(q->async_depth * qsv_fifo_item_size());
-@@ -197,54 +199,72 @@ static int qsv_decode_init(AVCodecContext *avctx, QSVContext *q)
- return ret;
- }
-
-- ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
-- if (ret < 0)
-- return ret;
-+ param->IOPattern = q->iopattern;
-+ param->AsyncDepth = q->async_depth;
-+ param->ExtParam = q->ext_buffers;
-+ param->NumExtParam = q->nb_ext_buffers;
-
-- param.mfx.CodecId = ret;
-- param.mfx.CodecProfile = ff_qsv_profile_to_mfx(avctx->codec_id, avctx->profile);
-- param.mfx.CodecLevel = ff_qsv_level_to_mfx(avctx->codec_id, avctx->level);
--
-- param.mfx.FrameInfo.BitDepthLuma = desc->comp[0].depth;
-- param.mfx.FrameInfo.BitDepthChroma = desc->comp[0].depth;
-- param.mfx.FrameInfo.Shift = desc->comp[0].depth > 8;
-- param.mfx.FrameInfo.FourCC = q->fourcc;
-- param.mfx.FrameInfo.Width = frame_width;
-- param.mfx.FrameInfo.Height = frame_height;
-- param.mfx.FrameInfo.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
--
-- switch (avctx->field_order) {
-- case AV_FIELD_PROGRESSIVE:
-- param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
-- break;
-- case AV_FIELD_TT:
-- param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_TFF;
-- break;
-- case AV_FIELD_BB:
-- param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_BFF;
-- break;
-- default:
-- param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_UNKNOWN;
-- break;
-- }
-+ return 0;
-+ }
-
-- param.IOPattern = q->iopattern;
-- param.AsyncDepth = q->async_depth;
-- param.ExtParam = q->ext_buffers;
-- param.NumExtParam = q->nb_ext_buffers;
-+static int qsv_decode_init(AVCodecContext *avctx, QSVContext *q, mfxVideoParam *param)
-+{
-+ int ret;
-
-- if (!check_dec_param(avctx, q, &param)) {
-- //Just give a warning instead of an error since it is still decodable possibly.
-- av_log(avctx, AV_LOG_WARNING,
-- "Current input bitstream is not supported by QSV decoder.\n");
-- }
-+ avctx->width = param->mfx.FrameInfo.CropW;
-+ avctx->height = param->mfx.FrameInfo.CropH;
-+ avctx->coded_width = param->mfx.FrameInfo.Width;
-+ avctx->coded_height = param->mfx.FrameInfo.Height;
-+ avctx->level = param->mfx.CodecLevel;
-+ avctx->profile = param->mfx.CodecProfile;
-+ avctx->field_order = ff_qsv_map_picstruct(param->mfx.FrameInfo.PicStruct);
-+ avctx->pix_fmt = ff_qsv_map_fourcc(param->mfx.FrameInfo.FourCC);
-
-- ret = MFXVideoDECODE_Init(q->session, &param);
-+ ret = MFXVideoDECODE_Init(q->session, param);
- if (ret < 0)
- return ff_qsv_print_error(avctx, ret,
- "Error initializing the MFX video decoder");
-
-- q->frame_info = param.mfx.FrameInfo;
-+ q->frame_info = param->mfx.FrameInfo;
-+
-+ return 0;
-+}
-+
-+static int qsv_decode_header(AVCodecContext *avctx, QSVContext *q, AVPacket *avpkt, enum AVPixelFormat pix_fmt, mfxVideoParam *param)
-+{
-+ int ret;
-+
-+ mfxBitstream bs = { 0 };
-+
-+ if (avpkt->size) {
-+ bs.Data = avpkt->data;
-+ bs.DataLength = avpkt->size;
-+ bs.MaxLength = bs.DataLength;
-+ bs.TimeStamp = avpkt->pts;
-+ if (avctx->field_order == AV_FIELD_PROGRESSIVE)
-+ bs.DataFlag |= MFX_BITSTREAM_COMPLETE_FRAME;
-+ } else
-+ return AVERROR_INVALIDDATA;
-+
-+
-+ if(!q->session) {
-+ ret = qsv_decode_preinit(avctx, q, pix_fmt, param);
-+ if (ret < 0)
-+ return ret;
-+ }
-+
-+ ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
-+ if (ret < 0)
-+ return ret;
-+
-+ param->mfx.CodecId = ret;
-+ ret = MFXVideoDECODE_DecodeHeader(q->session, &bs, param);
-+ if (MFX_ERR_MORE_DATA == ret) {
-+ return AVERROR(EAGAIN);
-+ }
-+ if (ret < 0)
-+ return ff_qsv_print_error(avctx, ret,
-+ "Error decoding stream header");
-
- return 0;
- }
-@@ -527,7 +547,8 @@ int ff_qsv_process_data(AVCodecContext *avctx, QSVContext *q,
- uint8_t *dummy_data;
- int dummy_size;
- int ret;
-- const AVPixFmtDescriptor *desc;
-+ mfxVideoParam param = { 0 };
-+ enum AVPixelFormat pix_fmt = AV_PIX_FMT_NV12;
-
- if (!q->avctx_internal) {
- q->avctx_internal = avcodec_alloc_context3(NULL);
-@@ -541,7 +562,6 @@ int ff_qsv_process_data(AVCodecContext *avctx, QSVContext *q,
- return AVERROR(ENOMEM);
-
- q->parser->flags |= PARSER_FLAG_COMPLETE_FRAMES;
-- q->orig_pix_fmt = AV_PIX_FMT_NONE;
- }
-
- if (!pkt->size)
-@@ -554,15 +574,23 @@ int ff_qsv_process_data(AVCodecContext *avctx, QSVContext *q,
- pkt->data, pkt->size, pkt->pts, pkt->dts,
- pkt->pos);
-
-- avctx->field_order = q->parser->field_order;
- /* TODO: flush delayed frames on reinit */
-- if (q->parser->format != q->orig_pix_fmt ||
-- FFALIGN(q->parser->coded_width, 16) != FFALIGN(avctx->coded_width, 16) ||
-- FFALIGN(q->parser->coded_height, 16) != FFALIGN(avctx->coded_height, 16)) {
-- enum AVPixelFormat pix_fmts[3] = { AV_PIX_FMT_QSV,
-- AV_PIX_FMT_NONE,
-- AV_PIX_FMT_NONE };
-- enum AVPixelFormat qsv_format;
-+
-+ // sw_pix_fmt, coded_width/height should be set for ff_get_format(),
-+ // assume sw_pix_fmt is NV12 and coded_width/height to be 1280x720,
-+ // the assumption may be not corret but will be updated after header decoded if not true.
-+ if (q->orig_pix_fmt != AV_PIX_FMT_NONE)
-+ pix_fmt = q->orig_pix_fmt;
-+ if (!avctx->coded_width)
-+ avctx->coded_width = 1280;
-+ if (!avctx->coded_height)
-+ avctx->coded_height = 720;
-+
-+ ret = qsv_decode_header(avctx, q, pkt, pix_fmt, &param);
-+
-+ if (ret >= 0 && (q->orig_pix_fmt != ff_qsv_map_fourcc(param.mfx.FrameInfo.FourCC) ||
-+ avctx->coded_width != param.mfx.FrameInfo.Width ||
-+ avctx->coded_height != param.mfx.FrameInfo.Height)) {
- AVPacket zero_pkt = {0};
-
- if (q->buffered_count) {
-@@ -571,45 +599,24 @@ int ff_qsv_process_data(AVCodecContext *avctx, QSVContext *q,
- q->buffered_count--;
- return qsv_decode(avctx, q, frame, got_frame, &zero_pkt);
- }
--
- q->reinit_flag = 0;
-
-- qsv_format = ff_qsv_map_pixfmt(q->parser->format, &q->fourcc);
-- if (qsv_format < 0) {
-- av_log(avctx, AV_LOG_ERROR,
-- "Decoding pixel format '%s' is not supported\n",
-- av_get_pix_fmt_name(q->parser->format));
-- ret = AVERROR(ENOSYS);
-- goto reinit_fail;
-- }
-+ q->orig_pix_fmt = avctx->pix_fmt = pix_fmt = ff_qsv_map_fourcc(param.mfx.FrameInfo.FourCC);
-
-- q->orig_pix_fmt = q->parser->format;
-- avctx->pix_fmt = pix_fmts[1] = qsv_format;
-- avctx->width = q->parser->width;
-- avctx->height = q->parser->height;
-- avctx->coded_width = FFALIGN(q->parser->coded_width, 16);
-- avctx->coded_height = FFALIGN(q->parser->coded_height, 16);
-- avctx->level = q->avctx_internal->level;
-- avctx->profile = q->avctx_internal->profile;
-+ avctx->coded_width = param.mfx.FrameInfo.Width;
-+ avctx->coded_height = param.mfx.FrameInfo.Height;
-
-- ret = ff_get_format(avctx, pix_fmts);
-+ ret = qsv_decode_preinit(avctx, q, pix_fmt, &param);
- if (ret < 0)
- goto reinit_fail;
-+ q->initialized = 0;
-+ }
-
-- avctx->pix_fmt = ret;
--
-- desc = av_pix_fmt_desc_get(avctx->pix_fmt);
-- if (!desc)
-- goto reinit_fail;
--
-- if (desc->comp[0].depth > 8) {
-- avctx->coded_width = FFALIGN(q->parser->coded_width, 32);
-- avctx->coded_height = FFALIGN(q->parser->coded_height, 32);
-- }
--
-- ret = qsv_decode_init(avctx, q);
-+ if (!q->initialized) {
-+ ret = qsv_decode_init(avctx, q, &param);
- if (ret < 0)
- goto reinit_fail;
-+ q->initialized = 1;
- }
-
- return qsv_decode(avctx, q, frame, got_frame, pkt);
-@@ -622,4 +629,5 @@ reinit_fail:
- void ff_qsv_decode_flush(AVCodecContext *avctx, QSVContext *q)
- {
- q->orig_pix_fmt = AV_PIX_FMT_NONE;
-+ q->initialized = 0;
- }
-diff --git a/libavcodec/qsvdec.h b/libavcodec/qsvdec.h
-index 111536caba..4812fb2a6b 100644
---- a/libavcodec/qsvdec.h
-+++ b/libavcodec/qsvdec.h
-@@ -63,6 +63,8 @@ typedef struct QSVContext {
- uint32_t fourcc;
- mfxFrameInfo frame_info;
-
-+ int initialized;
-+
- // options set by the caller
- int async_depth;
- int iopattern;
-diff --git a/libavcodec/qsvdec_h2645.c b/libavcodec/qsvdec_h2645.c
-index 9b49f5506e..eb1dc336a4 100644
---- a/libavcodec/qsvdec_h2645.c
-+++ b/libavcodec/qsvdec_h2645.c
-@@ -103,6 +103,7 @@ static av_cold int qsv_decode_init(AVCodecContext *avctx)
- }
- }
-
-+ s->qsv.orig_pix_fmt = AV_PIX_FMT_NV12;
- s->packet_fifo = av_fifo_alloc(sizeof(AVPacket));
- if (!s->packet_fifo) {
- ret = AVERROR(ENOMEM);
-diff --git a/libavcodec/qsvdec_other.c b/libavcodec/qsvdec_other.c
-index 03251d2c85..a6f1b88ca0 100644
---- a/libavcodec/qsvdec_other.c
-+++ b/libavcodec/qsvdec_other.c
-@@ -90,6 +90,7 @@ static av_cold int qsv_decode_init(AVCodecContext *avctx)
- }
- #endif
-
-+ s->qsv.orig_pix_fmt = AV_PIX_FMT_NV12;
- s->packet_fifo = av_fifo_alloc(sizeof(AVPacket));
- if (!s->packet_fifo) {
- ret = AVERROR(ENOMEM);
-diff --git a/libavfilter/qsvvpp.c b/libavfilter/qsvvpp.c
-index 8d5ff2eb65..61135541e5 100644
---- a/libavfilter/qsvvpp.c
-+++ b/libavfilter/qsvvpp.c
-@@ -32,10 +32,11 @@
- #include "qsvvpp.h"
- #include "video.h"
-
--#define IS_VIDEO_MEMORY(mode) (mode & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | \
-+#define IS_VIDEO_MEMORY(mode) (mode & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | \
- MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
--#define IS_OPAQUE_MEMORY(mode) (mode & MFX_MEMTYPE_OPAQUE_FRAME)
--#define IS_SYSTEM_MEMORY(mode) (mode & MFX_MEMTYPE_SYSTEM_MEMORY)
-+#define IS_OPAQUE_MEMORY(mode) (mode & MFX_MEMTYPE_OPAQUE_FRAME)
-+#define IS_SYSTEM_MEMORY(mode) (mode & MFX_MEMTYPE_SYSTEM_MEMORY)
-+#define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
-
- typedef struct QSVFrame {
- AVFrame *frame;
-@@ -405,12 +406,12 @@ static int init_vpp_session(AVFilterContext *avctx, QSVVPPContext *s)
- AVFilterLink *outlink = avctx->outputs[0];
- AVQSVFramesContext *in_frames_hwctx = NULL;
- AVQSVFramesContext *out_frames_hwctx = NULL;
-+ mfxHandleType handle_type = MFX_IMPL_AUTO;
-
- AVBufferRef *device_ref;
- AVHWDeviceContext *device_ctx;
- AVQSVDeviceContext *device_hwctx;
- mfxHDL handle;
-- mfxHandleType handle_type;
- mfxVersion ver;
- mfxIMPL impl;
- int ret, i;
-@@ -497,15 +498,30 @@ static int init_vpp_session(AVFilterContext *avctx, QSVVPPContext *s)
- return AVERROR_UNKNOWN;
- }
-
-+ if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(impl))
-+ {
-+ handle_type = MFX_HANDLE_D3D11_DEVICE;
-+ }
-+ else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(impl))
-+ {
-+ handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
-+ }
-+ else if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(impl))
-+ {
-+ handle_type = MFX_HANDLE_VA_DISPLAY;
-+ }
-+
- for (i = 0; i < FF_ARRAY_ELEMS(handle_types); i++) {
-- ret = MFXVideoCORE_GetHandle(device_hwctx->session, handle_types[i], &handle);
-- if (ret == MFX_ERR_NONE) {
-- handle_type = handle_types[i];
-- break;
-+ if (handle_types[i] == handle_type)
-+ {
-+ ret = MFXVideoCORE_GetHandle(device_hwctx->session, handle_types[i], &handle);
-+ if (ret == MFX_ERR_NONE) {
-+ break;
-+ }
- }
-+ handle = NULL;
- }
--
-- if (ret != MFX_ERR_NONE) {
-+ if (!handle) {
- av_log(avctx, AV_LOG_ERROR, "Error getting the session handle\n");
- return AVERROR_UNKNOWN;
- }
-diff --git a/libavutil/hwcontext_d3d11va.c b/libavutil/hwcontext_d3d11va.c
-index 6670c47579..34e1ece437 100644
---- a/libavutil/hwcontext_d3d11va.c
-+++ b/libavutil/hwcontext_d3d11va.c
-@@ -510,15 +510,57 @@ static void d3d11va_device_uninit(AVHWDeviceContext *hwdev)
- }
- }
-
-+static int d3d11va_device_find_adapter_by_vendor_id(AVHWDeviceContext *ctx, UINT creationFlags, char *vendor)
-+{
-+ HRESULT hr;
-+ IDXGIAdapter *adapter = NULL;
-+ int adapter_id = 0;
-+ IDXGIFactory2 *factory;
-+ long int vendor_id = strtol(vendor, NULL, 0);
-+ hr = mCreateDXGIFactory(&IID_IDXGIFactory2, (void **)&factory);
-+ while (IDXGIFactory2_EnumAdapters(factory, adapter_id++, &adapter) != DXGI_ERROR_NOT_FOUND)
-+ {
-+ ID3D11Device* device = NULL;
-+ DXGI_ADAPTER_DESC adapter_desc;
-+
-+ hr = mD3D11CreateDevice(adapter, D3D_DRIVER_TYPE_UNKNOWN, NULL, creationFlags, NULL, 0, D3D11_SDK_VERSION, &device, NULL, NULL);
-+ if (FAILED(hr)) {
-+ av_log(ctx, AV_LOG_ERROR, "D3D11CreateDevice returned error\n");
-+ continue;
-+ }
-+
-+ hr = IDXGIAdapter2_GetDesc(adapter, &adapter_desc);
-+ if (FAILED(hr)) {
-+ av_log(ctx, AV_LOG_ERROR, "IDXGIAdapter2_GetDesc returned error\n");
-+ continue;
-+ }
-+
-+ if (device)
-+ ID3D11Device_Release(device);
-+
-+ if (adapter)
-+ IDXGIAdapter_Release(adapter);
-+
-+ if (adapter_desc.VendorId == vendor_id) {
-+ IDXGIFactory2_Release(factory);
-+ return adapter_id - 1;
-+ }
-+ }
-+ IDXGIFactory2_Release(factory);
-+ return -1;
-+}
-+
- static int d3d11va_device_create(AVHWDeviceContext *ctx, const char *device,
- AVDictionary *opts, int flags)
- {
- AVD3D11VADeviceContext *device_hwctx = ctx->hwctx;
-
- HRESULT hr;
-+ AVDictionaryEntry *e;
- IDXGIAdapter *pAdapter = NULL;
- ID3D10Multithread *pMultithread;
- UINT creationFlags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT;
-+ int adapter = -1;
- int is_debug = !!av_dict_get(opts, "debug", NULL, 0);
- int ret;
-
-@@ -538,11 +580,23 @@ static int d3d11va_device_create(AVHWDeviceContext *ctx, const char *device,
- return AVERROR_UNKNOWN;
- }
-
-+ e = av_dict_get(opts, "vendor", NULL, 0);
-+ if (e) {
-+ adapter = d3d11va_device_find_adapter_by_vendor_id(ctx, creationFlags, e ? e->value : NULL);
-+ if (adapter < 0) {
-+ av_log(ctx, AV_LOG_ERROR, "Failed to find d3d11va adapter by vendor id\n");
-+ return AVERROR_UNKNOWN;
-+ }
-+ }
-+
- if (device) {
-+ adapter = atoi(device);
-+ }
-+
-+ if (adapter >= 0) {
- IDXGIFactory2 *pDXGIFactory;
- hr = mCreateDXGIFactory(&IID_IDXGIFactory2, (void **)&pDXGIFactory);
- if (SUCCEEDED(hr)) {
-- int adapter = atoi(device);
- if (FAILED(IDXGIFactory2_EnumAdapters(pDXGIFactory, adapter, &pAdapter)))
- pAdapter = NULL;
- IDXGIFactory2_Release(pDXGIFactory);
-diff --git a/libavutil/hwcontext_qsv.c b/libavutil/hwcontext_qsv.c
-index 59e4ed9157..270ef976d7 100644
---- a/libavutil/hwcontext_qsv.c
-+++ b/libavutil/hwcontext_qsv.c
-@@ -27,9 +27,13 @@
- #include <pthread.h>
- #endif
-
-+#define COBJMACROS
- #if CONFIG_VAAPI
- #include "hwcontext_vaapi.h"
- #endif
-+#if CONFIG_D3D11VA
-+#include "hwcontext_d3d11va.h"
-+#endif
- #if CONFIG_DXVA2
- #include "hwcontext_dxva2.h"
- #endif
-@@ -44,6 +48,8 @@
- #include "pixdesc.h"
- #include "time.h"
-
-+#define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
-+
- typedef struct QSVDevicePriv {
- AVBufferRef *child_device_ctx;
- } QSVDevicePriv;
-@@ -89,6 +95,9 @@ static const struct {
- #if CONFIG_VAAPI
- { MFX_HANDLE_VA_DISPLAY, AV_HWDEVICE_TYPE_VAAPI, AV_PIX_FMT_VAAPI },
- #endif
-+#if CONFIG_D3D11VA
-+ { MFX_HANDLE_D3D11_DEVICE, AV_HWDEVICE_TYPE_D3D11VA, AV_PIX_FMT_D3D11 },
-+#endif
- #if CONFIG_DXVA2
- { MFX_HANDLE_D3D9_DEVICE_MANAGER, AV_HWDEVICE_TYPE_DXVA2, AV_PIX_FMT_DXVA2_VLD },
- #endif
-@@ -119,25 +128,11 @@ static int qsv_device_init(AVHWDeviceContext *ctx)
- {
- AVQSVDeviceContext *hwctx = ctx->hwctx;
- QSVDeviceContext *s = ctx->internal->priv;
-+ mfxHandleType handle_type = MFX_IMPL_AUTO;
-
- mfxStatus err;
- int i;
-
-- for (i = 0; supported_handle_types[i].handle_type; i++) {
-- err = MFXVideoCORE_GetHandle(hwctx->session, supported_handle_types[i].handle_type,
-- &s->handle);
-- if (err == MFX_ERR_NONE) {
-- s->handle_type = supported_handle_types[i].handle_type;
-- s->child_device_type = supported_handle_types[i].device_type;
-- s->child_pix_fmt = supported_handle_types[i].pix_fmt;
-- break;
-- }
-- }
-- if (!s->handle) {
-- av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
-- "from the session\n");
-- }
--
- err = MFXQueryIMPL(hwctx->session, &s->impl);
- if (err == MFX_ERR_NONE)
- err = MFXQueryVersion(hwctx->session, &s->ver);
-@@ -146,6 +141,36 @@ static int qsv_device_init(AVHWDeviceContext *ctx)
- return AVERROR_UNKNOWN;
- }
-
-+ if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(s->impl))
-+ {
-+ handle_type = MFX_HANDLE_D3D11_DEVICE;
-+ }
-+ else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(s->impl))
-+ {
-+ handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
-+ }
-+ else if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(s->impl))
-+ {
-+ handle_type = MFX_HANDLE_VA_DISPLAY;
-+ }
-+
-+ for (i = 0; supported_handle_types[i].handle_type; i++) {
-+ if (supported_handle_types[i].handle_type == handle_type) {
-+ err = MFXVideoCORE_GetHandle(hwctx->session, supported_handle_types[i].handle_type,
-+ &s->handle);
-+ if (err == MFX_ERR_NONE) {
-+ s->handle_type = supported_handle_types[i].handle_type;
-+ s->child_device_type = supported_handle_types[i].device_type;
-+ s->child_pix_fmt = supported_handle_types[i].pix_fmt;
-+ break;
-+ }
-+ }
-+ }
-+ if (!s->handle) {
-+ av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
-+ "from the session\n");
-+ }
-+
- return 0;
- }
-
-@@ -222,13 +247,19 @@ static int qsv_init_child_ctx(AVHWFramesContext *ctx)
- if (!child_device_ref)
- return AVERROR(ENOMEM);
- child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;
--
- #if CONFIG_VAAPI
- if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
- AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
- child_device_hwctx->display = (VADisplay)device_priv->handle;
- }
- #endif
-+#if CONFIG_D3D11VA
-+ if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
-+ AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
-+ ID3D11Device_AddRef((ID3D11Device*)device_priv->handle);
-+ child_device_hwctx->device = (ID3D11Device*)device_priv->handle;
-+ }
-+#endif
- #if CONFIG_DXVA2
- if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
- AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
-@@ -255,6 +286,16 @@ static int qsv_init_child_ctx(AVHWFramesContext *ctx)
- child_frames_ctx->width = FFALIGN(ctx->width, 16);
- child_frames_ctx->height = FFALIGN(ctx->height, 16);
-
-+#if CONFIG_D3D11VA
-+ if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
-+ AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
-+ child_frames_hwctx->MiscFlags |= D3D11_RESOURCE_MISC_SHARED;
-+ if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
-+ child_frames_hwctx->BindFlags = D3D11_BIND_RENDER_TARGET ;
-+ else
-+ child_frames_hwctx->BindFlags = D3D11_BIND_DECODER;
-+ }
-+#endif
- #if CONFIG_DXVA2
- if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
- AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
-@@ -279,6 +320,18 @@ static int qsv_init_child_ctx(AVHWFramesContext *ctx)
- hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
- }
- #endif
-+#if CONFIG_D3D11VA
-+ if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
-+ AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
-+ hwctx->texture = child_frames_hwctx->texture;
-+ for (i = 0; i < ctx->initial_pool_size; i++)
-+ s->surfaces_internal[i].Data.MemId = (mfxMemId)(int64_t)i;
-+ if (child_frames_hwctx->BindFlags == D3D11_BIND_DECODER)
-+ hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
-+ else
-+ hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
-+ }
-+#endif
- #if CONFIG_DXVA2
- if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
- AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
-@@ -421,7 +474,16 @@ static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
-
- static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
- {
-- *hdl = mid;
-+ AVHWFramesContext *ctx = pthis;
-+ AVQSVFramesContext *hwctx = ctx->hwctx;
-+
-+ if (hwctx->texture) {
-+ mfxHDLPair *pair = (mfxHDLPair*)hdl;
-+ pair->first = hwctx->texture;
-+ pair->second = mid;
-+ } else {
-+ *hdl = mid;
-+ }
- return MFX_ERR_NONE;
- }
-
-@@ -668,6 +730,11 @@ static int qsv_map_from(AVHWFramesContext *ctx,
- child_data = (uint8_t*)(intptr_t)*(VASurfaceID*)surf->Data.MemId;
- break;
- #endif
-+#if CONFIG_D3D11VA
-+ case AV_HWDEVICE_TYPE_D3D11VA:
-+ child_data = surf->Data.MemId;
-+ break;
-+#endif
- #if CONFIG_DXVA2
- case AV_HWDEVICE_TYPE_DXVA2:
- child_data = surf->Data.MemId;
-@@ -972,6 +1039,27 @@ static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx,
- }
- break;
- #endif
-+#if CONFIG_D3D11VA
-+ case AV_HWDEVICE_TYPE_D3D11VA:
-+ {
-+ AVD3D11VAFramesContext *src_hwctx = src_ctx->hwctx;
-+ s->surfaces_internal = av_mallocz_array(src_ctx->initial_pool_size,
-+ sizeof(*s->surfaces_internal));
-+ if (!s->surfaces_internal)
-+ return AVERROR(ENOMEM);
-+ dst_hwctx->texture = src_hwctx->texture;
-+ for (i = 0; i < src_ctx->initial_pool_size; i++) {
-+ qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
-+ s->surfaces_internal[i].Data.MemId = (mfxMemId)(int64_t)i;
-+ }
-+ dst_hwctx->nb_surfaces = src_ctx->initial_pool_size;
-+ if (src_hwctx->BindFlags == D3D11_BIND_DECODER)
-+ dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
-+ else
-+ dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
-+ }
-+ break;
-+#endif
- #if CONFIG_DXVA2
- case AV_HWDEVICE_TYPE_DXVA2:
- {
-@@ -1009,14 +1097,26 @@ static int qsv_map_to(AVHWFramesContext *dst_ctx,
-
- for (i = 0; i < hwctx->nb_surfaces; i++) {
- #if CONFIG_VAAPI
-- if (*(VASurfaceID*)hwctx->surfaces[i].Data.MemId ==
-- (VASurfaceID)(uintptr_t)src->data[3])
-- break;
-+ if (AV_PIX_FMT_VAAPI == src->format) {
-+ if (*(VASurfaceID*)hwctx->surfaces[i].Data.MemId ==
-+ (VASurfaceID)(uintptr_t)src->data[3])
-+ break;
-+ }
-+#endif
-+#if CONFIG_D3D11VA
-+ if (AV_PIX_FMT_D3D11 == src->format) {
-+ if ((hwctx->texture == (ID3D11Texture2D*)(uintptr_t)src->data[0]) &&
-+ ((ID3D11Texture2D*)hwctx->surfaces[i].Data.MemId ==
-+ (ID3D11Texture2D*)(uintptr_t)src->data[1]))
-+ break;
-+ }
- #endif
- #if CONFIG_DXVA2
-- if ((IDirect3DSurface9*)hwctx->surfaces[i].Data.MemId ==
-- (IDirect3DSurface9*)(uintptr_t)src->data[3])
-- break;
-+ if (AV_PIX_FMT_DXVA2_VLD == src->format) {
-+ if ((IDirect3DSurface9*)hwctx->surfaces[i].Data.MemId ==
-+ (IDirect3DSurface9*)(uintptr_t)src->data[3])
-+ break;
-+ }
- #endif
- }
- if (i >= hwctx->nb_surfaces) {
-@@ -1074,7 +1174,7 @@ static void qsv_device_free(AVHWDeviceContext *ctx)
- av_freep(&priv);
- }
-
--static mfxIMPL choose_implementation(const char *device)
-+static mfxIMPL choose_implementation(const char *device, enum AVHWDeviceType child_device_type)
- {
- static const struct {
- const char *name;
-@@ -1103,6 +1203,10 @@ static mfxIMPL choose_implementation(const char *device)
- impl = strtol(device, NULL, 0);
- }
-
-+ if ( (child_device_type == AV_HWDEVICE_TYPE_D3D11VA) && (impl != MFX_IMPL_SOFTWARE) ) {
-+ impl |= MFX_IMPL_VIA_D3D11;
-+ }
-+
- return impl;
- }
-
-@@ -1129,6 +1233,15 @@ static int qsv_device_derive_from_child(AVHWDeviceContext *ctx,
- }
- break;
- #endif
-+#if CONFIG_D3D11VA
-+ case AV_HWDEVICE_TYPE_D3D11VA:
-+ {
-+ AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
-+ handle_type = MFX_HANDLE_D3D11_DEVICE;
-+ handle = (mfxHDL)child_device_hwctx->device;
-+ }
-+ break;
-+#endif
- #if CONFIG_DXVA2
- case AV_HWDEVICE_TYPE_DXVA2:
- {
-@@ -1196,8 +1309,10 @@ fail:
- static int qsv_device_derive(AVHWDeviceContext *ctx,
- AVHWDeviceContext *child_device_ctx, int flags)
- {
-- return qsv_device_derive_from_child(ctx, MFX_IMPL_HARDWARE_ANY,
-- child_device_ctx, flags);
-+ mfxIMPL impl;
-+ impl = choose_implementation("hw_any", child_device_ctx->type);
-+ return qsv_device_derive_from_child(ctx, impl,
-+ child_device_ctx, flags);
- }
-
- static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
-@@ -1219,25 +1334,63 @@ static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
- ctx->user_opaque = priv;
- ctx->free = qsv_device_free;
-
-- e = av_dict_get(opts, "child_device", NULL, 0);
--
-- child_device_opts = NULL;
-- if (CONFIG_VAAPI) {
-+ e = av_dict_get(opts, "child_device_type", NULL, 0);
-+ if (e) {
-+ child_device_type = av_hwdevice_find_type_by_name(e ? e->value : NULL);
-+ if (child_device_type == AV_HWDEVICE_TYPE_NONE) {
-+ av_log(ctx, AV_LOG_ERROR, "Unknown child device type "
-+ "\"%s\".\n", e ? e->value : NULL);
-+ return AVERROR(EINVAL);
-+ }
-+ } else if (CONFIG_VAAPI) {
- child_device_type = AV_HWDEVICE_TYPE_VAAPI;
-- // libmfx does not actually implement VAAPI properly, rather it
-- // depends on the specific behaviour of a matching iHD driver when
-- // used on recent Intel hardware. Set options to the VAAPI device
-- // creation so that we should pick a usable setup by default if
-- // possible, even when multiple devices and drivers are available.
-- av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
-- av_dict_set(&child_device_opts, "driver", "iHD", 0);
-- } else if (CONFIG_DXVA2)
-+ } else if (CONFIG_DXVA2) {
- child_device_type = AV_HWDEVICE_TYPE_DXVA2;
-- else {
-+ } else if (CONFIG_D3D11VA) {
-+ child_device_type = AV_HWDEVICE_TYPE_D3D11VA;
-+ } else {
- av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
- return AVERROR(ENOSYS);
- }
-
-+ child_device_opts = NULL;
-+ switch (child_device_type) {
-+#if CONFIG_VAAPI
-+ case AV_HWDEVICE_TYPE_VAAPI:
-+ {
-+ // libmfx does not actually implement VAAPI properly, rather it
-+ // depends on the specific behaviour of a matching iHD driver when
-+ // used on recent Intel hardware. Set options to the VAAPI device
-+ // creation so that we should pick a usable setup by default if
-+ // possible, even when multiple devices and drivers are available.
-+ av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
-+ av_dict_set(&child_device_opts, "driver", "iHD", 0);
-+ }
-+ break;
-+#endif
-+#if CONFIG_D3D11VA
-+ case AV_HWDEVICE_TYPE_D3D11VA:
-+ {
-+ av_log(ctx, AV_LOG_VERBOSE,"AV_HWDEVICE_TYPE_D3D11VA has been selected\n");
-+ }
-+ break;
-+#endif
-+#if CONFIG_DXVA2
-+ case AV_HWDEVICE_TYPE_DXVA2:
-+ {
-+ av_log(ctx, AV_LOG_VERBOSE,"AV_HWDEVICE_TYPE_DXVA2 has been selected\n");
-+ }
-+ break;
-+#endif
-+ default:
-+ {
-+ av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
-+ return AVERROR(ENOSYS);
-+ }
-+ break;
-+ }
-+
-+ e = av_dict_get(opts, "child_device", NULL, 0);
- ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
- e ? e->value : NULL, child_device_opts, 0);
- if (ret < 0)
-@@ -1245,7 +1398,7 @@ static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
-
- child_device = (AVHWDeviceContext*)priv->child_device_ctx->data;
-
-- impl = choose_implementation(device);
-+ impl = choose_implementation(device, child_device_type);
-
- return qsv_device_derive_from_child(ctx, impl, child_device, 0);
- }
-diff --git a/libavutil/hwcontext_qsv.h b/libavutil/hwcontext_qsv.h
-index b98d611cfc..d21d58eb61 100644
---- a/libavutil/hwcontext_qsv.h
-+++ b/libavutil/hwcontext_qsv.h
-@@ -34,6 +34,15 @@
- */
- typedef struct AVQSVDeviceContext {
- mfxSession session;
-+ /**
-+ * Need to store actual handle type that session uses
-+ * MFXVideoCORE_GetHandle() function returns mfxHandleType
-+ * always equal to MFX_HANDLE_D3D9_DEVICE_MANAGER
-+ * even when MFX_HANDLE_D3D11_DEVICE was set as handle before by
-+ * MFXVideoCORE_SetHandle() to mfx session.
-+ * Fixed already but will be available only with latest driver.
-+ */
-+ //mfxHandleType handle_type;
- } AVQSVDeviceContext;
-
- /**
-@@ -42,11 +51,11 @@ typedef struct AVQSVDeviceContext {
- typedef struct AVQSVFramesContext {
- mfxFrameSurface1 *surfaces;
- int nb_surfaces;
--
- /**
- * A combination of MFX_MEMTYPE_* describing the frame pool.
- */
- int frame_type;
-+ void *texture;
- } AVQSVFramesContext;
-
- #endif /* AVUTIL_HWCONTEXT_QSV_H */
diff --git a/contrib/ffmpeg/A19-matroskaenc-aac-extradata-updated.patch b/contrib/ffmpeg/A19-matroskaenc-aac-extradata-updated.patch
new file mode 100644
index 000000000..8916a79ea
--- /dev/null
+++ b/contrib/ffmpeg/A19-matroskaenc-aac-extradata-updated.patch
@@ -0,0 +1,24 @@
+Updated version of patch by John Stebbins <[email protected]>
+[PATCH] avformat/matroskaenc: return error if aac extradata changes
+extradata changing would result in an invalid stream.
+also, as the code was written, rewriting extradata was corrupting the
+track header resulting in an invalid file.
+--- ffmpeg/libavformat/matroskaenc.c.orig 2020-06-18 01:03:40.000000000 -0400
++++ ffmpeg/libavformat/matroskaenc.c 2020-06-18 01:30:46.000000000 -0400
+@@ -2193,6 +2193,16 @@
+ case AV_CODEC_ID_AAC:
+ if (side_data_size && mkv->track.bc) {
+ int filler, output_sample_rate = 0;
++ if (par->extradata && par->extradata_size) {
++ if (par->extradata_size != side_data_size ||
++ memcmp(par->extradata, side_data, side_data_size)) {
++ av_log(s, AV_LOG_ERROR, "Error, AAC extradata changed mid-stream.\n");
++ return AVERROR(EINVAL);
++ } else {
++ // Already written
++ break;
++ }
++ }
+ ret = get_aac_sample_rates(s, mkv, side_data, side_data_size,
+ &track->sample_rate, &output_sample_rate);
+ if (ret < 0)
diff --git a/contrib/ffmpeg/A19-matroskaenc-aac-extradata.patch b/contrib/ffmpeg/A19-matroskaenc-aac-extradata.patch
deleted file mode 100644
index 73a406292..000000000
--- a/contrib/ffmpeg/A19-matroskaenc-aac-extradata.patch
+++ /dev/null
@@ -1,36 +0,0 @@
-From a3fa88bdfd6c9615ea9b9f64caf722fa2292d29a Mon Sep 17 00:00:00 2001
-From: John Stebbins <[email protected]>
-Date: Sat, 2 May 2020 07:26:29 -0600
-Subject: [PATCH] avformat/matroskaenc: return error if aac extradata changes
-
-extradata changing would result in an invalid stream.
-also, as the code was written, rewriting extradata was corrupting the
-track header resulting in an invalid file.
-
-(cherry picked from commit 805890cddee93ebafcf6db8784315aa3fd6faf81)
----
- libavformat/matroskaenc.c | 8 ++++++++
- 1 file changed, 8 insertions(+)
-
-diff --git a/libavformat/matroskaenc.c b/libavformat/matroskaenc.c
-index cef504fa05..ea7f81ab94 100644
---- a/libavformat/matroskaenc.c
-+++ b/libavformat/matroskaenc.c
-@@ -2283,6 +2283,14 @@ static int mkv_check_new_extra_data(AVFormatContext *s, AVPacket *pkt)
- if (side_data_size && (s->pb->seekable & AVIO_SEEKABLE_NORMAL) && !mkv->is_live) {
- int filler, output_sample_rate = 0;
- int64_t curpos;
-+ if (par->extradata && par->extradata_size)
-+ if (par->extradata_size != side_data_size ||
-+ memcmp(par->extradata, side_data, side_data_size)) {
-+ av_log(s, AV_LOG_ERROR, "Error, AAC extradata changed mid-stream.\n");
-+ return AVERROR(EINVAL);
-+ } else // Already written
-+ break;
-+
- ret = get_aac_sample_rates(s, side_data, side_data_size, &track->sample_rate,
- &output_sample_rate);
- if (ret < 0)
---
-2.25.4
-
diff --git a/contrib/ffmpeg/module.defs b/contrib/ffmpeg/module.defs
index 8733e954d..05896f751 100644
--- a/contrib/ffmpeg/module.defs
+++ b/contrib/ffmpeg/module.defs
@@ -12,9 +12,9 @@ endif
$(eval $(call import.MODULE.defs,FFMPEG,ffmpeg,$(__deps__)))
$(eval $(call import.CONTRIB.defs,FFMPEG))
-FFMPEG.FETCH.url = https://github.com/HandBrake/HandBrake-contribs/releases/download/contribs/ffmpeg-4.2.3.tar.bz2
-FFMPEG.FETCH.url += https://ffmpeg.org/releases/ffmpeg-4.2.3.tar.bz2
-FFMPEG.FETCH.sha256 = 217eb211c33303b37c5521a5abe1f0140854d6810c6a6ee399456cc96356795e
+FFMPEG.FETCH.url = https://github.com/HandBrake/HandBrake-contribs/releases/download/contribs/ffmpeg-4.3.tar.bz2
+FFMPEG.FETCH.url += https://ffmpeg.org/releases/ffmpeg-4.3.tar.bz2
+FFMPEG.FETCH.sha256 = a7e87112fc49ad5b59e26726e3a7cae0ffae511cba5376c579ba3cb04483d6e2
FFMPEG.CONFIGURE.deps =
FFMPEG.CONFIGURE.host =
@@ -137,7 +137,7 @@ endif
ifeq (1,$(FEATURE.nvenc))
FFMPEG.CONFIGURE.extra += \
- --enable-nvenc \
+ --enable-ffnvcodec \
--enable-encoder=h264_nvenc \
--enable-encoder=hevc_nvenc
endif