@@ -302,6 +302,7 @@ External library support:
--enable-opengl enable OpenGL rendering [no]
--enable-openssl enable openssl, needed for https support
if gnutls, libtls or mbedtls is not used [no]
+ --enable-vulkan enable Vulkan code [no]
--disable-sndio disable sndio support [autodetect]
--disable-schannel disable SChannel SSP, needed for TLS support on
Windows if openssl and gnutls are not used [autodetect]
@@ -1770,6 +1771,7 @@ HWACCEL_LIBRARY_LIST="
mmal
omx
opencl
+ vulkan
"
DOCUMENT_LIST="
@@ -2226,6 +2228,7 @@ HAVE_LIST="
opencl_dxva2
opencl_vaapi_beignet
opencl_vaapi_intel_media
+ vulkan_drm_mod
perl
pod2man
texi2html
@@ -6357,6 +6360,13 @@ enabled vdpau &&
enabled crystalhd && check_lib crystalhd "stdint.h libcrystalhd/libcrystalhd_if.h" DtsCrystalHDVersion -lcrystalhd
+enabled vulkan &&
+ require_pkg_config vulkan "vulkan >= 1.1.73" "vulkan/vulkan.h" vkCreateInstance
+
+if enabled_all vulkan libdrm ; then
+ check_cpp_condition vulkan_drm_mod vulkan/vulkan.h "defined VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME"
+fi
+
if enabled x86; then
case $target_os in
mingw32*|mingw64*|win32|win64|linux|cygwin*)
@@ -15,6 +15,10 @@ libavutil: 2017-10-21
API changes, most recent first:
+2018-05-xx - xxxxxxxxxx - lavu 56.19.100 - hwcontext.h
+ Add AV_PIX_FMT_VULKAN
+ Add AV_HWDEVICE_TYPE_VULKAN and implementation.
+
2018-05-xx - xxxxxxxxxx - lavf 58.15.100 - avformat.h
Add pmt_version field to AVProgram
@@ -42,6 +42,7 @@ HEADERS = adler32.h \
hwcontext_vaapi.h \
hwcontext_videotoolbox.h \
hwcontext_vdpau.h \
+ hwcontext_vulkan.h \
imgutils.h \
intfloat.h \
intreadwrite.h \
@@ -168,6 +169,7 @@ OBJS-$(CONFIG_QSV) += hwcontext_qsv.o
OBJS-$(CONFIG_VAAPI) += hwcontext_vaapi.o
OBJS-$(CONFIG_VIDEOTOOLBOX) += hwcontext_videotoolbox.o
OBJS-$(CONFIG_VDPAU) += hwcontext_vdpau.o
+OBJS-$(CONFIG_VULKAN) += hwcontext_vulkan.o
OBJS += $(COMPAT_OBJS:%=../compat/%)
@@ -183,6 +185,7 @@ SKIPHEADERS-$(CONFIG_OPENCL) += hwcontext_opencl.h
SKIPHEADERS-$(CONFIG_VAAPI) += hwcontext_vaapi.h
SKIPHEADERS-$(CONFIG_VIDEOTOOLBOX) += hwcontext_videotoolbox.h
SKIPHEADERS-$(CONFIG_VDPAU) += hwcontext_vdpau.h
+SKIPHEADERS-$(CONFIG_VULKAN) += hwcontext_vulkan.h
TESTPROGS = adler32 \
aes \
@@ -58,6 +58,9 @@ static const HWContextType * const hw_table[] = {
#endif
#if CONFIG_MEDIACODEC
&ff_hwcontext_type_mediacodec,
+#endif
+#if CONFIG_VULKAN
+ &ff_hwcontext_type_vulkan,
#endif
NULL,
};
@@ -73,6 +76,7 @@ static const char *const hw_type_names[] = {
[AV_HWDEVICE_TYPE_VDPAU] = "vdpau",
[AV_HWDEVICE_TYPE_VIDEOTOOLBOX] = "videotoolbox",
[AV_HWDEVICE_TYPE_MEDIACODEC] = "mediacodec",
+ [AV_HWDEVICE_TYPE_VULKAN] = "vulkan",
};
enum AVHWDeviceType av_hwdevice_find_type_by_name(const char *name)
@@ -36,6 +36,7 @@ enum AVHWDeviceType {
AV_HWDEVICE_TYPE_DRM,
AV_HWDEVICE_TYPE_OPENCL,
AV_HWDEVICE_TYPE_MEDIACODEC,
+ AV_HWDEVICE_TYPE_VULKAN,
};
typedef struct AVHWDeviceInternal AVHWDeviceInternal;
@@ -172,5 +172,6 @@ extern const HWContextType ff_hwcontext_type_vaapi;
extern const HWContextType ff_hwcontext_type_vdpau;
extern const HWContextType ff_hwcontext_type_videotoolbox;
extern const HWContextType ff_hwcontext_type_mediacodec;
+extern const HWContextType ff_hwcontext_type_vulkan;
#endif /* AVUTIL_HWCONTEXT_INTERNAL_H */
new file mode 100644
@@ -0,0 +1,2225 @@
+/*
+ * Vulkan hwcontext
+ * Copyright (c) 2018 Rostislav Pehlivanov <atomnuker@gmail.com>
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include "config.h"
+#include "pixdesc.h"
+#include "avstring.h"
+#include "hwcontext.h"
+#include "hwcontext_internal.h"
+#include "hwcontext_vulkan.h"
+
+#if CONFIG_LIBDRM
+#include <unistd.h> /* lseek */
+#include <xf86drm.h>
+#include <drm_fourcc.h>
+#include "hwcontext_drm.h"
+#ifndef DRM_FORMAT_MOD_INVALID
+#define DRM_FORMAT_MOD_INVALID ((1ULL << 56) - 1)
+#endif
+#if CONFIG_VAAPI
+#include <va/va_drmcommon.h>
+#include "hwcontext_vaapi.h"
+#endif
+#endif
+
+/* Private per-device state, stored in AVHWDeviceContext.internal->priv.
+ * Public handles (instance, devices, queues) live in AVVulkanDeviceContext;
+ * everything here is internal to this file. */
+typedef struct VulkanDevicePriv {
+    /* Properties */
+    VkPhysicalDeviceProperties props;  /* cached vkGetPhysicalDeviceProperties result */
+    VkPhysicalDeviceMemoryProperties mprops;  /* cached memory heap/type info */
+
+    /* Debug callback */
+    VkDebugUtilsMessengerEXT debug_ctx;  /* only created when the "debug" option is set */
+
+    /* Image uploading */
+    VkCommandPool cmd_pool;
+    VkCommandBuffer cmd_buf;
+    VkQueue cmd_queue;  /* queue 0 of queue_family_tx_index */
+    VkFence cmd_fence;
+
+    /* Extensions */
+    uint64_t extensions;  /* bitmask of enum VulkanExtensions actually enabled */
+
+    /* Settings */
+    int use_linear_images;    /* "linear_images" option: request linear tiling */
+    int use_disjoint_images;  /* "disjoint_images" option: request disjoint planes */
+} VulkanDevicePriv;
+
+/* Declares and loads an instance-level function pointer as pfn_<name>. */
+#define VK_LOAD_PFN(inst, name) PFN_##name pfn_##name = (PFN_##name) \
+                                vkGetInstanceProcAddr(inst, #name)
+
+/* Usage flags requested for every image we create. */
+#define DEFAULT_USAGE_FLAGS (VK_IMAGE_USAGE_SAMPLED_BIT      |                 \
+                             VK_IMAGE_USAGE_STORAGE_BIT      |                 \
+                             VK_IMAGE_USAGE_TRANSFER_SRC_BIT |                 \
+                             VK_IMAGE_USAGE_TRANSFER_DST_BIT)
+
+/* Appends val to a heap-grown array, incrementing count.
+ * Grows through a temporary so the original array is not leaked when
+ * av_realloc_array() fails (the previous version clobbered "list" with
+ * NULL, leaking all prior elements). On failure, frees the array, sets
+ * "err" and jumps to the caller's "end" label. */
+#define ADD_VAL_TO_LIST(list, count, val)                                      \
+    do {                                                                       \
+        void *tmp = av_realloc_array(list, sizeof(*list), ++count);            \
+        if (!tmp) {                                                            \
+            av_freep(&list);                                                   \
+            err = AVERROR(ENOMEM);                                             \
+            goto end;                                                          \
+        }                                                                      \
+        list = tmp;                                                            \
+        list[count - 1] = val;                                                 \
+    } while(0)
+
+/* AVPixelFormat -> VkFormat translation table.
+ * Entries not listed are zero-initialized, which equals VK_FORMAT_UNDEFINED
+ * (i.e. unsupported). Bit depths must match the AVPixelFormat exactly. */
+static const VkFormat vk_format_map[AV_PIX_FMT_NB] = {
+    /* Gray */
+    [AV_PIX_FMT_GRAY8] = VK_FORMAT_R8_UNORM,
+    [AV_PIX_FMT_GRAY16] = VK_FORMAT_R16_UNORM,
+
+    /* Interleaved */
+    [AV_PIX_FMT_NV12] = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
+    [AV_PIX_FMT_P010] = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
+    [AV_PIX_FMT_P016] = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,
+    /* NV16/UYVY422/YUYV422 are 8-bit formats; the previous 16-bit
+     * G16/B16 mappings did not match their memory layout. */
+    [AV_PIX_FMT_NV16] = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,
+    [AV_PIX_FMT_UYVY422] = VK_FORMAT_B8G8R8G8_422_UNORM,
+    [AV_PIX_FMT_YUYV422] = VK_FORMAT_G8B8G8R8_422_UNORM,
+
+    /* 420 */
+    [AV_PIX_FMT_YUV420P] = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
+    [AV_PIX_FMT_YUV420P16] = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,
+
+    /* 422 */
+    [AV_PIX_FMT_YUV422P] = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,
+    [AV_PIX_FMT_YUV422P16] = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
+
+    /* 444 */
+    [AV_PIX_FMT_YUV444P] = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,
+    [AV_PIX_FMT_YUV444P16] = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
+
+    /* RGB */
+    [AV_PIX_FMT_ABGR] = VK_FORMAT_A8B8G8R8_UNORM_PACK32,
+    [AV_PIX_FMT_BGRA] = VK_FORMAT_B8G8R8A8_UNORM,
+    [AV_PIX_FMT_RGBA] = VK_FORMAT_R8G8B8A8_UNORM,
+    [AV_PIX_FMT_RGB24] = VK_FORMAT_R8G8B8_UNORM,
+    [AV_PIX_FMT_BGR24] = VK_FORMAT_B8G8R8_UNORM,
+    [AV_PIX_FMT_RGB48] = VK_FORMAT_R16G16B16_UNORM,
+    [AV_PIX_FMT_RGBA64] = VK_FORMAT_R16G16B16A16_UNORM,
+    [AV_PIX_FMT_RGB565] = VK_FORMAT_R5G6B5_UNORM_PACK16,
+    [AV_PIX_FMT_BGR565] = VK_FORMAT_B5G6R5_UNORM_PACK16,
+    [AV_PIX_FMT_BGR0] = VK_FORMAT_B8G8R8A8_UNORM,
+    [AV_PIX_FMT_0BGR] = VK_FORMAT_A8B8G8R8_UNORM_PACK32,
+    [AV_PIX_FMT_RGB0] = VK_FORMAT_R8G8B8A8_UNORM,
+};
+
+/* Bits recorded in VulkanDevicePriv.extensions for optional extensions that
+ * were actually enabled. EXT_OPTIONAL / EXT_REQUIRED are meta-flags used only
+ * in the tables below and are never stored.
+ * NOTE(review): 1ULL << 62/63 exceed the int range ISO C guarantees for
+ * enumeration constants; this relies on a compiler extension - confirm all
+ * supported compilers accept it, or switch to #define/uint64_t constants. */
+enum VulkanExtensions {
+    EXT_DEDICATED_ALLOC = 1ULL << 0, /* VK_KHR_dedicated_allocation */
+    EXT_IMAGE_FORMAT_LIST = 1ULL << 1, /* VK_KHR_image_format_list */
+    EXT_EXTERNAL_MEMORY = 1ULL << 2, /* VK_KHR_external_memory */
+    EXT_EXTERNAL_HOST_MEMORY = 1ULL << 3, /* VK_EXT_external_memory_host */
+    EXT_EXTERNAL_FD_MEMORY = 1ULL << 4, /* VK_KHR_external_memory_fd */
+    EXT_EXTERNAL_DMABUF_MEMORY = 1ULL << 5, /* VK_EXT_external_memory_dma_buf */
+    EXT_DRM_MODIFIER_FLAGS = 1ULL << 6, /* VK_EXT_image_drm_format_modifier */
+    EXT_YUV_IMAGES = 1ULL << 7, /* VK_KHR_sampler_ycbcr_conversion */
+
+    EXT_OPTIONAL = 1ULL << 62,
+    EXT_REQUIRED = 1ULL << 63,
+};
+
+/* Pairs an extension-name string with the VulkanExtensions bit it enables. */
+typedef struct VulkanOptExtension {
+    const char *name;
+    uint64_t flag;
+} VulkanOptExtension;
+
+/* Instance-level extensions we try to enable. "static": this table is
+ * file-local and must not export a global symbol. */
+static VulkanOptExtension optional_instance_exts[] = {
+    { VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME,     EXT_EXTERNAL_MEMORY, },
+    { VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, EXT_REQUIRED },
+};
+
+/* Device-level extensions we try to enable. "static": file-local table,
+ * must not export a global symbol. */
+static VulkanOptExtension optional_device_exts[] = {
+    { VK_KHR_MAINTENANCE1_EXTENSION_NAME,                     EXT_REQUIRED },
+    { VK_KHR_MAINTENANCE2_EXTENSION_NAME,                     EXT_REQUIRED },
+    { VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME,        EXT_REQUIRED },
+    { VK_KHR_BIND_MEMORY_2_EXTENSION_NAME,                    EXT_REQUIRED },
+    { VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME,       EXT_REQUIRED },
+
+    { VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME,               EXT_OPTIONAL, },
+    { VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME,             EXT_OPTIONAL, }, /* TODO: Fix on AMD hardware */
+
+    { VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME,             EXT_DEDICATED_ALLOC, },
+    { VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME,                EXT_IMAGE_FORMAT_LIST, },
+    { VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME,                  EXT_EXTERNAL_MEMORY, },
+    { VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME,          EXT_EXTERNAL_DMABUF_MEMORY, },
+    { VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,         EXT_YUV_IMAGES },
+#if HAVE_VULKAN_DRM_MOD
+    { VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME,        EXT_DRM_MODIFIER_FLAGS, },
+#else
+    /* NOTE(review): dma_buf appears twice here - this branch maps it to
+     * EXT_DRM_MODIFIER_FLAGS as a fallback when the SDK headers lack the
+     * modifier extension; confirm this aliasing is intentional. */
+    { VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME,          EXT_DRM_MODIFIER_FLAGS, },
+#endif
+};
+
+/* Public mapping from an AVPixelFormat to its VkFormat equivalent.
+ * Returns VK_FORMAT_UNDEFINED for out-of-range or unmapped formats. */
+VkFormat av_vkfmt_from_pixfmt(enum AVPixelFormat p)
+{
+    if (p < 0 || p >= AV_PIX_FMT_NB)
+        return VK_FORMAT_UNDEFINED;
+    /* Unmapped table slots are zero-initialized == VK_FORMAT_UNDEFINED. */
+    return vk_format_map[p];
+}
+
+/* Returns 1 if the pixel format maps to a VkFormat offering at least one of
+ * the sample/storage/transfer features for the requested tiling, else 0. */
+static int vkfmt_is_supported(AVVulkanDeviceContext *hwctx, enum AVPixelFormat p,
+                              int linear)
+{
+    const VkFormatFeatureFlags wanted = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT |
+                                        VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT |
+                                        VK_FORMAT_FEATURE_TRANSFER_SRC_BIT  |
+                                        VK_FORMAT_FEATURE_TRANSFER_DST_BIT;
+    VkFormatProperties2 prop = {
+        .sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
+    };
+    VkFormatFeatureFlags feats;
+    VkFormat vkf = av_vkfmt_from_pixfmt(p);
+
+    if (vkf == VK_FORMAT_UNDEFINED)
+        return 0;
+
+    vkGetPhysicalDeviceFormatProperties2(hwctx->phys_dev, vkf, &prop);
+
+    /* Pick the feature set matching the tiling the caller will use. */
+    feats = linear ? prop.formatProperties.linearTilingFeatures
+                   : prop.formatProperties.optimalTilingFeatures;
+
+    return !!(feats & wanted);
+}
+
+/* Converts a VkResult return value to a human-readable string for logging.
+ * Unknown/newer codes fall through to "Unknown error". */
+static const char *vk_ret2str(VkResult res)
+{
+#define CASE(VAL) case VAL: return #VAL
+    switch (res) {
+    CASE(VK_SUCCESS);
+    CASE(VK_NOT_READY);
+    CASE(VK_TIMEOUT);
+    CASE(VK_EVENT_SET);
+    CASE(VK_EVENT_RESET);
+    CASE(VK_INCOMPLETE);
+    CASE(VK_ERROR_OUT_OF_HOST_MEMORY);
+    CASE(VK_ERROR_OUT_OF_DEVICE_MEMORY);
+    CASE(VK_ERROR_INITIALIZATION_FAILED);
+    CASE(VK_ERROR_DEVICE_LOST);
+    CASE(VK_ERROR_MEMORY_MAP_FAILED);
+    CASE(VK_ERROR_LAYER_NOT_PRESENT);
+    CASE(VK_ERROR_EXTENSION_NOT_PRESENT);
+    CASE(VK_ERROR_FEATURE_NOT_PRESENT);
+    CASE(VK_ERROR_INCOMPATIBLE_DRIVER);
+    CASE(VK_ERROR_TOO_MANY_OBJECTS);
+    CASE(VK_ERROR_FORMAT_NOT_SUPPORTED);
+    CASE(VK_ERROR_FRAGMENTED_POOL);
+    CASE(VK_ERROR_SURFACE_LOST_KHR);
+    CASE(VK_ERROR_NATIVE_WINDOW_IN_USE_KHR);
+    CASE(VK_SUBOPTIMAL_KHR);
+    CASE(VK_ERROR_OUT_OF_DATE_KHR);
+    CASE(VK_ERROR_INCOMPATIBLE_DISPLAY_KHR);
+    CASE(VK_ERROR_VALIDATION_FAILED_EXT);
+    CASE(VK_ERROR_INVALID_SHADER_NV);
+    CASE(VK_ERROR_OUT_OF_POOL_MEMORY);
+    CASE(VK_ERROR_INVALID_EXTERNAL_HANDLE);
+    CASE(VK_ERROR_NOT_PERMITTED_EXT);
+    default: return "Unknown error";
+    }
+#undef CASE
+}
+
+/* VK_EXT_debug_utils messenger callback: routes validation-layer output to
+ * av_log() of the owning AVHWDeviceContext (passed via pUserData/priv). */
+static VkBool32 vk_dbg_callback(VkDebugUtilsMessageSeverityFlagBitsEXT severity,
+                                VkDebugUtilsMessageTypeFlagsEXT messageType,
+                                const VkDebugUtilsMessengerCallbackDataEXT *data,
+                                void *priv)
+{
+    AVHWDeviceContext *ctx = priv;
+    int level;
+
+    /* Map the Vulkan severity onto the closest av_log level. */
+    if (severity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT)
+        level = AV_LOG_VERBOSE;
+    else if (severity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT)
+        level = AV_LOG_INFO;
+    else if (severity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT)
+        level = AV_LOG_WARNING;
+    else if (severity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT)
+        level = AV_LOG_ERROR;
+    else
+        level = AV_LOG_DEBUG;
+
+    av_log(ctx, level, "%s\n", data->pMessage);
+    for (int i = 0; i < data->cmdBufLabelCount; i++)
+        av_log(ctx, level, "\t%i: %s\n", i, data->pCmdBufLabels[i].pLabelName);
+
+    /* VK_FALSE: never abort the Vulkan call that triggered the message. */
+    return 0;
+}
+
+/* Negotiates the list of instance (dev == 0) or device (dev != 0) extensions
+ * to enable. Missing EXT_REQUIRED extensions fail with EINVAL; optional ones
+ * merely set their bit in VulkanDevicePriv.extensions. When debug is set (and
+ * dev == 0) the debug_utils extension is mandatory.
+ * On success *dst receives a heap array of name pointers (caller frees the
+ * array only - the strings are static) and *num its length. On failure the
+ * partially built array is freed here. */
+static int check_extensions(AVHWDeviceContext *ctx, int dev,
+                            const char * const **dst, uint32_t *num, int debug)
+{
+    const char *tstr;
+    const char **extension_names = NULL;
+    VulkanDevicePriv *p = ctx->internal->priv;
+    AVVulkanDeviceContext *hwctx = ctx->hwctx;
+    int err = 0, found, extensions_found = 0;
+
+    const char *mod;
+    int optional_exts_num;
+    uint32_t sup_ext_count;
+    VkExtensionProperties *sup_ext;
+    VulkanOptExtension *optional_exts;
+
+    if (!dev) {
+        mod = "instance";
+        optional_exts = optional_instance_exts;
+        optional_exts_num = FF_ARRAY_ELEMS(optional_instance_exts);
+        vkEnumerateInstanceExtensionProperties(NULL, &sup_ext_count, NULL);
+        sup_ext = av_malloc_array(sup_ext_count, sizeof(VkExtensionProperties));
+        if (!sup_ext)
+            return AVERROR(ENOMEM);
+        vkEnumerateInstanceExtensionProperties(NULL, &sup_ext_count, sup_ext);
+    } else {
+        mod = "device";
+        optional_exts = optional_device_exts;
+        optional_exts_num = FF_ARRAY_ELEMS(optional_device_exts);
+        vkEnumerateDeviceExtensionProperties(hwctx->phys_dev, NULL,
+                                             &sup_ext_count, NULL);
+        sup_ext = av_malloc_array(sup_ext_count, sizeof(VkExtensionProperties));
+        if (!sup_ext)
+            return AVERROR(ENOMEM);
+        vkEnumerateDeviceExtensionProperties(hwctx->phys_dev, NULL,
+                                             &sup_ext_count, sup_ext);
+    }
+
+    for (int i = 0; i < optional_exts_num; i++) {
+        /* EXT_REQUIRED is bit 63; "int req = flag & EXT_REQUIRED" truncated
+         * it to 0, so required extensions were never actually enforced.
+         * Normalize to 0/1 before the narrowing assignment. */
+        int req = !!(optional_exts[i].flag & EXT_REQUIRED);
+        tstr = optional_exts[i].name;
+
+        found = 0;
+        for (int j = 0; j < sup_ext_count; j++) {
+            if (!strcmp(tstr, sup_ext[j].extensionName)) {
+                found = 1;
+                break;
+            }
+        }
+        if (!found) {
+            int lvl = req ? AV_LOG_ERROR : AV_LOG_VERBOSE;
+            av_log(ctx, lvl, "Extension \"%s\" not found!\n", tstr);
+            if (req) {
+                err = AVERROR(EINVAL);
+                goto end;
+            }
+            continue;
+        }
+        if (!req)
+            p->extensions |= optional_exts[i].flag;
+
+        av_log(ctx, AV_LOG_VERBOSE, "Using %s extension \"%s\"\n", mod, tstr);
+
+        ADD_VAL_TO_LIST(extension_names, extensions_found, tstr);
+    }
+
+    if (debug && !dev) {
+        tstr = VK_EXT_DEBUG_UTILS_EXTENSION_NAME;
+        found = 0;
+        for (int j = 0; j < sup_ext_count; j++) {
+            if (!strcmp(tstr, sup_ext[j].extensionName)) {
+                found = 1;
+                break;
+            }
+        }
+        if (found) {
+            ADD_VAL_TO_LIST(extension_names, extensions_found, tstr);
+        } else {
+            av_log(ctx, AV_LOG_ERROR, "Debug extension \"%s\" not found!\n",
+                   tstr);
+            err = AVERROR(EINVAL);
+            goto end;
+        }
+    }
+
+    *dst = extension_names;
+    *num = extensions_found;
+
+end:
+    /* Previously the half-built name list leaked on every error path. */
+    if (err < 0)
+        av_freep(&extension_names);
+    av_free(sup_ext);
+    return err;
+}
+
+/* Creates the VkInstance (and, with the "debug" option, a debug messenger).
+ * The validation layer is enabled only in debug mode. */
+static int create_instance(AVHWDeviceContext *ctx, AVDictionary *opts)
+{
+    int err = 0;
+    VkResult ret;
+    VulkanDevicePriv *p = ctx->internal->priv;
+    AVVulkanDeviceContext *hwctx = ctx->hwctx;
+    AVDictionaryEntry *debug_opt = av_dict_get(opts, "debug", NULL, 0);
+    const int debug_mode = debug_opt && strtol(debug_opt->value, NULL, 10);
+    VkApplicationInfo application_info = {
+        .sType              = VK_STRUCTURE_TYPE_APPLICATION_INFO,
+        .pEngineName        = "libavutil",
+        .apiVersion         = VK_API_VERSION_1_1,
+        .engineVersion      = VK_MAKE_VERSION(LIBAVUTIL_VERSION_MAJOR,
+                                              LIBAVUTIL_VERSION_MINOR,
+                                              LIBAVUTIL_VERSION_MICRO),
+    };
+    VkInstanceCreateInfo inst_props = {
+        .sType            = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
+        .pApplicationInfo = &application_info,
+    };
+
+    /* Check for present/missing extensions */
+    err = check_extensions(ctx, 0, &inst_props.ppEnabledExtensionNames,
+                           &inst_props.enabledExtensionCount, debug_mode);
+    if (err < 0)
+        return err;
+
+    if (debug_mode) {
+        static const char *layers[] = { "VK_LAYER_LUNARG_standard_validation" };
+        inst_props.ppEnabledLayerNames = layers;
+        inst_props.enabledLayerCount   = FF_ARRAY_ELEMS(layers);
+    }
+
+    /* Try to create the instance */
+    ret = vkCreateInstance(&inst_props, hwctx->alloc, &hwctx->inst);
+
+    /* Free used memory */
+    av_free((void *)inst_props.ppEnabledExtensionNames);
+
+    /* Check for errors */
+    if (ret != VK_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Instance creation failure: %s\n",
+               vk_ret2str(ret));
+        return AVERROR_EXTERNAL;
+    }
+
+    if (debug_mode) {
+        VkDebugUtilsMessengerCreateInfoEXT dbg = {
+            .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
+            .messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT |
+                               VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT    |
+                               VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
+                               VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT,
+            .messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT    |
+                           VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
+                           VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT,
+            .pfnUserCallback = vk_dbg_callback,
+            .pUserData = ctx,
+        };
+        VK_LOAD_PFN(hwctx->inst, vkCreateDebugUtilsMessengerEXT);
+
+        /* vkGetInstanceProcAddr may return NULL; calling through an
+         * unchecked pointer would crash. Failure to set up the messenger
+         * only degrades debugging, so warn rather than abort. */
+        if (!pfn_vkCreateDebugUtilsMessengerEXT) {
+            av_log(ctx, AV_LOG_WARNING,
+                   "Unable to load vkCreateDebugUtilsMessengerEXT, "
+                   "debug output disabled!\n");
+        } else {
+            ret = pfn_vkCreateDebugUtilsMessengerEXT(hwctx->inst, &dbg,
+                                                     hwctx->alloc, &p->debug_ctx);
+            if (ret != VK_SUCCESS)
+                av_log(ctx, AV_LOG_WARNING, "Failed to create debug messenger: %s\n",
+                       vk_ret2str(ret));
+        }
+    }
+
+    return 0;
+}
+
+/* Criteria for picking a physical device; tried in field order below,
+ * the first non-empty criterion decides. */
+typedef struct VulkanDeviceSelection {
+    const char *name; /* Will use this first unless NULL */
+    uint32_t pci_device; /* Will use this second unless 0x0 */
+    uint32_t vendor_id; /* Last resort to find something deterministic */
+    int index; /* Finally fall back to index */
+} VulkanDeviceSelection;
+
+/* Human-readable name of a physical-device type, for the GPU listing log. */
+static const char *vk_dev_type(enum VkPhysicalDeviceType type)
+{
+    if (type == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU)
+        return "integrated";
+    if (type == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU)
+        return "discrete";
+    if (type == VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU)
+        return "virtual";
+    if (type == VK_PHYSICAL_DEVICE_TYPE_CPU)
+        return "software";
+    return "unknown";
+}
+
+/* Selects a VkPhysicalDevice and stores it in hwctx->phys_dev.
+ * Selection order: substring name match, then exact PCI device ID, then
+ * vendor ID, then plain enumeration index. Returns 0 on success or a
+ * negative AVERROR code (phys_dev is left NULL on failure). */
+static int find_device(AVHWDeviceContext *ctx, VulkanDeviceSelection *select)
+{
+    int err = 0;
+    uint32_t num;
+    VkResult ret;
+    VkPhysicalDevice choice = NULL;
+    VkPhysicalDevice *devices = NULL;
+    VkPhysicalDeviceProperties *prop = NULL;
+    AVVulkanDeviceContext *hwctx = ctx->hwctx;
+
+    /* First call counts the devices, second call fetches the handles. */
+    ret = vkEnumeratePhysicalDevices(hwctx->inst, &num, NULL);
+    if (ret != VK_SUCCESS || !num) {
+        av_log(ctx, AV_LOG_ERROR, "No devices found: %s!\n", vk_ret2str(ret));
+        return AVERROR(ENODEV);
+    }
+
+    devices = av_malloc_array(num, sizeof(VkPhysicalDevice));
+    if (!devices)
+        return AVERROR(ENOMEM);
+
+    ret = vkEnumeratePhysicalDevices(hwctx->inst, &num, devices);
+    if (ret != VK_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Failed enumerating devices: %s\n",
+               vk_ret2str(ret));
+        err = AVERROR(ENODEV);
+        goto end;
+    }
+
+    prop = av_malloc_array(num, sizeof(VkPhysicalDeviceProperties));
+    if (!prop) {
+        err = AVERROR(ENOMEM);
+        goto end;
+    }
+
+    /* Log every candidate so users know what indices/names are available. */
+    av_log(ctx, AV_LOG_VERBOSE, "GPU listing:\n");
+    for (int i = 0; i < num; i++) {
+        vkGetPhysicalDeviceProperties(devices[i], &prop[i]);
+        av_log(ctx, AV_LOG_VERBOSE, "    %d: %s (%s) (0x%x)\n", i, prop[i].deviceName,
+               vk_dev_type(prop[i].deviceType), prop[i].deviceID);
+    }
+
+    if (select->name) {
+        av_log(ctx, AV_LOG_VERBOSE, "Requested device: %s\n", select->name);
+        for (int i = 0; i < num; i++) {
+            /* Substring match, so e.g. "GTX" suffices. */
+            if (strstr(prop[i].deviceName, select->name)) {
+                choice = devices[i];
+                goto end;
+            }
+        }
+        av_log(ctx, AV_LOG_ERROR, "Unable to find device \"%s\"!\n",
+               select->name);
+        err = AVERROR(ENODEV);
+        goto end;
+    } else if (select->pci_device) {
+        av_log(ctx, AV_LOG_VERBOSE, "Requested device: 0x%x\n", select->pci_device);
+        for (int i = 0; i < num; i++) {
+            if (select->pci_device == prop[i].deviceID) {
+                choice = devices[i];
+                goto end;
+            }
+        }
+        av_log(ctx, AV_LOG_ERROR, "Unable to find device with PCI ID 0x%x!\n",
+               select->pci_device);
+        err = AVERROR(EINVAL);
+        goto end;
+    } else if (select->vendor_id) {
+        av_log(ctx, AV_LOG_VERBOSE, "Requested vendor: 0x%x\n", select->vendor_id);
+        for (int i = 0; i < num; i++) {
+            if (select->vendor_id == prop[i].vendorID) {
+                choice = devices[i];
+                goto end;
+            }
+        }
+        av_log(ctx, AV_LOG_ERROR, "Unable to find device with Vendor ID 0x%x!\n",
+               select->vendor_id);
+        err = AVERROR(ENODEV);
+        goto end;
+    } else {
+        if (select->index < num) {
+            choice = devices[select->index];
+            goto end;
+        }
+        av_log(ctx, AV_LOG_ERROR, "Unable to find device with index %i!\n",
+               select->index);
+        err = AVERROR(ENODEV);
+        goto end;
+    }
+
+end:
+    av_free(devices);
+    av_free(prop);
+    /* NULL on failure; the physical device handle itself needs no freeing. */
+    hwctx->phys_dev = choice;
+
+    return err;
+}
+
+/* Picks queue families for graphics, compute and transfer work, records the
+ * chosen indices in the public hwctx, and appends matching entries to the
+ * VkDeviceQueueCreateInfo array inside *cd. Dedicated compute/transfer
+ * families are preferred; otherwise the graphics family handles everything. */
+static int search_queue_families(AVHWDeviceContext *ctx, VkDeviceCreateInfo *cd)
+{
+    uint32_t num;
+    VkQueueFamilyProperties *qs = NULL;
+    AVVulkanDeviceContext *hwctx = ctx->hwctx;
+    int graph_index = -1, comp_index = -1, tx_index = -1;
+    VkDeviceQueueCreateInfo *pc = (VkDeviceQueueCreateInfo *)cd->pQueueCreateInfos;
+
+    /* First get the number of queue families */
+    vkGetPhysicalDeviceQueueFamilyProperties(hwctx->phys_dev, &num, NULL);
+    if (!num) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to get queues!\n");
+        return AVERROR_EXTERNAL;
+    }
+
+    /* Then allocate memory */
+    qs = av_malloc_array(num, sizeof(VkQueueFamilyProperties));
+    if (!qs)
+        return AVERROR(ENOMEM);
+
+    /* Finally retrieve the queue families */
+    vkGetPhysicalDeviceQueueFamilyProperties(hwctx->phys_dev, &num, qs);
+
+#define SEARCH_FLAGS(expr, out)                                                \
+    for (int i = 0; i < num; i++) {                                            \
+        const VkQueueFlagBits flags = qs[i].queueFlags;                        \
+        if (expr) {                                                            \
+            out = i;                                                           \
+            break;                                                             \
+        }                                                                      \
+    }
+
+    SEARCH_FLAGS(flags & VK_QUEUE_GRAPHICS_BIT, graph_index)
+
+    /* A device without any graphics-capable family (e.g. compute-only)
+     * previously fell through to an out-of-bounds qs[-1] read below. */
+    if (graph_index < 0) {
+        av_log(ctx, AV_LOG_ERROR, "Unable to find a graphics queue family!\n");
+        av_free(qs);
+        return AVERROR_EXTERNAL;
+    }
+
+    SEARCH_FLAGS((flags & VK_QUEUE_COMPUTE_BIT) && (i != graph_index),
+                 comp_index)
+
+    SEARCH_FLAGS((flags & VK_QUEUE_TRANSFER_BIT) && (i != graph_index) &&
+                 (i != comp_index), tx_index)
+
+#undef SEARCH_FLAGS
+#define QF_FLAGS(flags)                                                        \
+    ((flags) & VK_QUEUE_GRAPHICS_BIT      ) ? "(graphics) " : "",              \
+    ((flags) & VK_QUEUE_COMPUTE_BIT       ) ? "(compute) "  : "",              \
+    ((flags) & VK_QUEUE_TRANSFER_BIT      ) ? "(transfer) " : "",              \
+    ((flags) & VK_QUEUE_SPARSE_BINDING_BIT) ? "(sparse) "   : ""
+
+    av_log(ctx, AV_LOG_VERBOSE, "Using queue family %i for graphics, "
+           "flags: %s%s%s%s\n", graph_index, QF_FLAGS(qs[graph_index].queueFlags));
+
+    /* Default everything to the graphics family, then upgrade below. */
+    hwctx->queue_family_index      = graph_index;
+    hwctx->queue_family_tx_index   = graph_index;
+    hwctx->queue_family_comp_index = graph_index;
+
+    pc[cd->queueCreateInfoCount++].queueFamilyIndex = graph_index;
+
+    if (comp_index != -1) {
+        av_log(ctx, AV_LOG_VERBOSE, "Using queue family %i for compute, "
+               "flags: %s%s%s%s\n", comp_index, QF_FLAGS(qs[comp_index].queueFlags));
+        hwctx->queue_family_tx_index   = comp_index;
+        hwctx->queue_family_comp_index = comp_index;
+        pc[cd->queueCreateInfoCount++].queueFamilyIndex = comp_index;
+    }
+
+    if (tx_index != -1) {
+        av_log(ctx, AV_LOG_VERBOSE, "Using queue family %i for transfers, "
+               "flags: %s%s%s%s\n", tx_index, QF_FLAGS(qs[tx_index].queueFlags));
+        hwctx->queue_family_tx_index = tx_index;
+        pc[cd->queueCreateInfoCount++].queueFamilyIndex = tx_index;
+    }
+
+#undef QF_FLAGS
+
+    av_free(qs);
+
+    return 0;
+}
+
+/* Creates the command pool/buffer, fence and transfer queue used for image
+ * uploads. On partial failure the created objects remain in priv and are
+ * released later by free_exec_ctx() (which checks each handle). */
+static int create_exec_ctx(AVHWDeviceContext *ctx)
+{
+    VkResult ret;
+    AVVulkanDeviceContext *hwctx = ctx->hwctx;
+    VulkanDevicePriv *p = ctx->internal->priv;
+
+    VkCommandPoolCreateInfo cqueue_create = {
+        .sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
+        /* Allows individual command buffers to be reset and re-recorded. */
+        .flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
+        .queueFamilyIndex = hwctx->queue_family_tx_index,
+    };
+    VkCommandBufferAllocateInfo cbuf_create = {
+        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
+        .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
+        .commandBufferCount = 1,
+    };
+    VkFenceCreateInfo fence_spawn = { VK_STRUCTURE_TYPE_FENCE_CREATE_INFO };
+
+    ret = vkCreateCommandPool(hwctx->act_dev, &cqueue_create,
+                              hwctx->alloc, &p->cmd_pool);
+    if (ret != VK_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Command pool creation failure: %s\n",
+               vk_ret2str(ret));
+        return AVERROR_EXTERNAL;
+    }
+
+    /* The pool handle must be set after the pool exists. */
+    cbuf_create.commandPool = p->cmd_pool;
+
+    ret = vkAllocateCommandBuffers(hwctx->act_dev, &cbuf_create, &p->cmd_buf);
+    if (ret != VK_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Command buffer alloc failure: %s\n",
+               vk_ret2str(ret));
+        return AVERROR_EXTERNAL;
+    }
+
+    ret = vkCreateFence(hwctx->act_dev, &fence_spawn,
+                        hwctx->alloc, &p->cmd_fence);
+    if (ret != VK_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to create frame fence: %s\n",
+               vk_ret2str(ret));
+        return AVERROR_EXTERNAL;
+    }
+
+    /* Queue 0 of the transfer-capable family; cannot fail. */
+    vkGetDeviceQueue(hwctx->act_dev, hwctx->queue_family_tx_index, 0,
+                     &p->cmd_queue);
+
+    return 0;
+}
+
+/* Releases the upload execution context created by create_exec_ctx().
+ * Safe on partially initialized state: unset (NULL) handles are skipped. */
+static void free_exec_ctx(AVHWDeviceContext *ctx)
+{
+    VulkanDevicePriv *p = ctx->internal->priv;
+    AVVulkanDeviceContext *hwctx = ctx->hwctx;
+    VkDevice dev = hwctx->act_dev;
+
+    /* Destroy in reverse order of creation. */
+    if (p->cmd_fence)
+        vkDestroyFence(dev, p->cmd_fence, hwctx->alloc);
+    if (p->cmd_buf)
+        vkFreeCommandBuffers(dev, p->cmd_pool, 1, &p->cmd_buf);
+    if (p->cmd_pool)
+        vkDestroyCommandPool(dev, p->cmd_pool, hwctx->alloc);
+}
+
+/* AVHWDeviceContext.free callback: tears down everything created during
+ * device setup - exec context, logical device, debug messenger, instance -
+ * in reverse order of creation. */
+static void vulkan_device_free(AVHWDeviceContext *ctx)
+{
+    VulkanDevicePriv *p = ctx->internal->priv;
+    AVVulkanDeviceContext *hwctx = ctx->hwctx;
+
+    free_exec_ctx(ctx);
+
+    vkDestroyDevice(hwctx->act_dev, hwctx->alloc);
+
+    /* The messenger only exists when the "debug" option was enabled. */
+    if (p->debug_ctx) {
+        VK_LOAD_PFN(hwctx->inst, vkDestroyDebugUtilsMessengerEXT);
+        pfn_vkDestroyDebugUtilsMessengerEXT(hwctx->inst, p->debug_ctx,
+                                            hwctx->alloc);
+    }
+
+    /* Must come last: the messenger and device belong to this instance. */
+    vkDestroyInstance(hwctx->inst, hwctx->alloc);
+}
+
+/* Shared implementation behind create/derive: builds the instance, selects a
+ * physical device, picks queue families, negotiates device extensions and
+ * creates the logical device. Options: "debug", "linear_images",
+ * "disjoint_images". Cleanup of partial state happens via ctx->free. */
+static int vulkan_device_create_internal(AVHWDeviceContext *ctx,
+                                         VulkanDeviceSelection *dev_select,
+                                         AVDictionary *opts, int flags)
+{
+    int err = 0;
+    VkResult ret;
+    AVDictionaryEntry *opt_d;
+    VulkanDevicePriv *p = ctx->internal->priv;
+    AVVulkanDeviceContext *hwctx = ctx->hwctx;
+    VkDeviceQueueCreateInfo queue_create_info[3] = {
+        {   .sType            = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
+            .pQueuePriorities = (float []){ 1.0f },
+            .queueCount       = 1, },
+        {   .sType            = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
+            .pQueuePriorities = (float []){ 1.0f },
+            .queueCount       = 1, },
+        {   .sType            = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
+            .pQueuePriorities = (float []){ 1.0f },
+            .queueCount       = 1, },
+    };
+
+    VkDeviceCreateInfo dev_info = {
+        .sType                = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
+        .pQueueCreateInfos    = queue_create_info,
+        .queueCreateInfoCount = 0,
+    };
+
+    /* Set early so partially created state is freed on any error below. */
+    ctx->free = vulkan_device_free;
+
+    /* Create an instance if not given one */
+    if ((err = create_instance(ctx, opts)))
+        goto end;
+
+    /* Find a device (if not given one) */
+    if ((err = find_device(ctx, dev_select)))
+        goto end;
+
+    vkGetPhysicalDeviceProperties(hwctx->phys_dev, &p->props);
+    av_log(ctx, AV_LOG_VERBOSE, "Using device: %s\n", p->props.deviceName);
+    av_log(ctx, AV_LOG_VERBOSE, "Alignments:\n");
+    /* These limits are VkDeviceSize (uint64_t) / size_t; the previous "%li"
+     * had the wrong width and signedness on 32-bit and LLP64 ABIs. */
+    av_log(ctx, AV_LOG_VERBOSE, "    optimalBufferCopyOffsetAlignment:   %llu\n",
+           (unsigned long long)p->props.limits.optimalBufferCopyOffsetAlignment);
+    av_log(ctx, AV_LOG_VERBOSE, "    optimalBufferCopyRowPitchAlignment: %llu\n",
+           (unsigned long long)p->props.limits.optimalBufferCopyRowPitchAlignment);
+    av_log(ctx, AV_LOG_VERBOSE, "    minMemoryMapAlignment:              %llu\n",
+           (unsigned long long)p->props.limits.minMemoryMapAlignment);
+
+    /* Search queue family */
+    if ((err = search_queue_families(ctx, &dev_info)))
+        goto end;
+
+    if ((err = check_extensions(ctx, 1, &dev_info.ppEnabledExtensionNames,
+                                &dev_info.enabledExtensionCount, 0)))
+        goto end;
+
+    ret = vkCreateDevice(hwctx->phys_dev, &dev_info, hwctx->alloc,
+                         &hwctx->act_dev);
+
+    /* The extension-name array is ours regardless of the outcome; it
+     * previously leaked on the failure path. */
+    av_free((void *)dev_info.ppEnabledExtensionNames);
+
+    if (ret != VK_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Device creation failure: %s\n",
+               vk_ret2str(ret));
+        err = AVERROR_EXTERNAL;
+        goto end;
+    }
+
+    /* Tiled images setting, use them by default */
+    opt_d = av_dict_get(opts, "linear_images", NULL, 0);
+    if (opt_d)
+        p->use_linear_images = strtol(opt_d->value, NULL, 10);
+
+    /* Disjoint images setting, don't use them by default */
+    opt_d = av_dict_get(opts, "disjoint_images", NULL, 0);
+    if (opt_d)
+        p->use_disjoint_images = strtol(opt_d->value, NULL, 10);
+
+end:
+    return err;
+}
+
+/* Post-creation init: validates the (possibly user-supplied) queue family
+ * indices, creates the upload execution context and caches memory props. */
+static int vulkan_device_init(AVHWDeviceContext *ctx)
+{
+    int err;
+    uint32_t qf_num;
+    AVVulkanDeviceContext *hwctx = ctx->hwctx;
+    VulkanDevicePriv *p = ctx->internal->priv;
+
+    vkGetPhysicalDeviceQueueFamilyProperties(hwctx->phys_dev, &qf_num, NULL);
+    if (!qf_num) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to get queues!\n");
+        return AVERROR_EXTERNAL;
+    }
+
+    /* Every queue family index in the public context must be in range. */
+    if (hwctx->queue_family_index      >= qf_num ||
+        hwctx->queue_family_tx_index   >= qf_num ||
+        hwctx->queue_family_comp_index >= qf_num) {
+        av_log(ctx, AV_LOG_ERROR, "Invalid queue index!\n");
+        return AVERROR_EXTERNAL;
+    }
+
+    /* Create exec context - if there's something invalid this will error out */
+    err = create_exec_ctx(ctx);
+    if (err)
+        return err;
+
+    /* Get device capabilities */
+    vkGetPhysicalDeviceMemoryProperties(hwctx->phys_dev, &p->mprops);
+
+    return 0;
+}
+
+/* Public device-creation entry point. A numeric "device" string selects by
+ * enumeration index; anything else is matched as a device-name substring. */
+static int vulkan_device_create(AVHWDeviceContext *ctx, const char *device,
+                                AVDictionary *opts, int flags)
+{
+    VulkanDeviceSelection dev_select = { 0 };
+
+    if (device && device[0]) {
+        char *end = NULL;
+        long idx = strtol(device, &end, 10);
+        if (end != device) {
+            dev_select.index = idx;
+        } else {
+            /* Not a number - fall back to a name match. */
+            dev_select.index = 0;
+            dev_select.name  = device;
+        }
+    }
+
+    return vulkan_device_create_internal(ctx, &dev_select, opts, flags);
+}
+
+/* Derives a Vulkan device from an existing VAAPI or DRM device context.
+ * Since neither API exposes a Vulkan handle directly, the match is heuristic:
+ * vendor ID for VAAPI (from the driver vendor string), PCI device ID for DRM. */
+static int vulkan_device_derive(AVHWDeviceContext *ctx,
+                                AVHWDeviceContext *src_ctx, int flags)
+{
+    VulkanDeviceSelection dev_select = { 0 };
+
+    switch(src_ctx->type) {
+#if CONFIG_LIBDRM
+#if CONFIG_VAAPI
+    case AV_HWDEVICE_TYPE_VAAPI: {
+        AVVAAPIDeviceContext *src_hwctx = src_ctx->hwctx;
+        const char *vendor = vaQueryVendorString(src_hwctx->display);
+        if (!vendor) {
+            av_log(ctx, AV_LOG_ERROR, "Unable to get device info from vaapi!\n");
+            return AVERROR_EXTERNAL;
+        }
+
+        /* Map the vendor string onto a PCI vendor ID for device selection. */
+        if (strstr(vendor, "Intel"))
+            dev_select.vendor_id = 0x8086;
+        if (strstr(vendor, "AMD"))
+            dev_select.vendor_id = 0x1002;
+
+        return vulkan_device_create_internal(ctx, &dev_select, NULL, flags);
+    }
+#endif
+    case AV_HWDEVICE_TYPE_DRM: {
+        AVDRMDeviceContext *src_hwctx = src_ctx->hwctx;
+
+        drmDevice *drm_dev_info;
+        int err = drmGetDevice(src_hwctx->fd, &drm_dev_info);
+        if (err) {
+            av_log(ctx, AV_LOG_ERROR, "Unable to get device info from drm fd!\n");
+            return AVERROR_EXTERNAL;
+        }
+
+        /* Only PCI devices carry a usable device ID; otherwise selection
+         * falls back to index 0 inside find_device(). */
+        if (drm_dev_info->bustype == DRM_BUS_PCI)
+            dev_select.pci_device = drm_dev_info->deviceinfo.pci->device_id;
+
+        drmFreeDevice(&drm_dev_info);
+
+        return vulkan_device_create_internal(ctx, &dev_select, NULL, flags);
+    }
+#endif
+    default:
+        return AVERROR(ENOSYS);
+    }
+}
+
+/* Fills the frame constraints: every pixel format with a usable VkFormat for
+ * the configured tiling, plus the device's 2D image size limits. */
+static int vulkan_frames_get_constraints(AVHWDeviceContext *ctx,
+                                         const void *hwconfig,
+                                         AVHWFramesConstraints *constraints)
+{
+    int count = 0;
+    AVVulkanDeviceContext *hwctx = ctx->hwctx;
+    VulkanDevicePriv *p = ctx->internal->priv;
+
+    /* First pass: count supported formats to size the allocation. */
+    for (enum AVPixelFormat i = 0; i < AV_PIX_FMT_NB; i++)
+        count += vkfmt_is_supported(hwctx, i, p->use_linear_images);
+
+    constraints->valid_sw_formats = av_malloc_array(count + 1,
+                                                    sizeof(enum AVPixelFormat));
+    if (!constraints->valid_sw_formats)
+        return AVERROR(ENOMEM);
+
+    /* Second pass: fill the list, terminated by AV_PIX_FMT_NONE. */
+    count = 0;
+    for (enum AVPixelFormat i = 0; i < AV_PIX_FMT_NB; i++)
+        if (vkfmt_is_supported(hwctx, i, p->use_linear_images))
+            constraints->valid_sw_formats[count++] = i;
+    constraints->valid_sw_formats[count++] = AV_PIX_FMT_NONE;
+
+    constraints->min_width  = 0;
+    constraints->min_height = 0;
+    constraints->max_width  = p->props.limits.maxImageDimension2D;
+    constraints->max_height = p->props.limits.maxImageDimension2D;
+
+    constraints->valid_hw_formats = av_malloc_array(2, sizeof(enum AVPixelFormat));
+    if (!constraints->valid_hw_formats)
+        return AVERROR(ENOMEM);
+
+    constraints->valid_hw_formats[0] = AV_PIX_FMT_VULKAN;
+    constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
+
+    return 0;
+}
+
+/* Allocates device memory matching *req and the requested property flags.
+ * alloc_extension is chained into VkMemoryAllocateInfo.pNext (e.g. dedicated
+ * or export info). On success *mem receives the allocation and *mem_flags is
+ * OR-ed with the full property flags of the chosen memory type.
+ * Note: may enlarge req->size to satisfy map alignment for host-visible
+ * allocations. */
+static int alloc_mem(AVHWDeviceContext *ctx, VkMemoryRequirements *req,
+                     VkMemoryPropertyFlagBits req_flags, void *alloc_extension,
+                     VkMemoryPropertyFlagBits *mem_flags, VkDeviceMemory *mem)
+{
+    VkResult ret;
+    int index = -1;
+    VulkanDevicePriv *p = ctx->internal->priv;
+    AVVulkanDeviceContext *dev_hwctx = ctx->hwctx;
+    VkMemoryAllocateInfo alloc_info = {
+        .sType           = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
+        .pNext           = alloc_extension,
+    };
+
+    /* Align if we need to */
+    if (req_flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
+        req->size = FFALIGN(req->size, p->props.limits.minMemoryMapAlignment);
+
+    alloc_info.allocationSize = req->size;
+
+    /* The vulkan spec requires memory types to be sorted in the "optimal"
+     * order, so the first matching type we find will be the best/fastest one */
+    for (int i = 0; i < p->mprops.memoryTypeCount; i++) {
+        /* The memory type must be supported by the requirements (bitfield) */
+        if (!(req->memoryTypeBits & (1 << i)))
+            continue;
+
+        /* The memory type flags must include our properties */
+        if ((p->mprops.memoryTypes[i].propertyFlags & req_flags) != req_flags)
+            continue;
+
+        /* Found a suitable memory type */
+        index = i;
+        break;
+    }
+
+    if (index < 0) {
+        av_log(ctx, AV_LOG_ERROR, "No memory type found for flags 0x%x\n",
+               req_flags);
+        return AVERROR(EINVAL);
+    }
+
+    alloc_info.memoryTypeIndex = index;
+
+    ret = vkAllocateMemory(dev_hwctx->act_dev, &alloc_info,
+                           dev_hwctx->alloc, mem);
+    if (ret != VK_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to allocate memory: %s\n",
+               vk_ret2str(ret));
+        return AVERROR(ENOMEM);
+    }
+
+    /* Report the actual properties of the chosen type back to the caller. */
+    *mem_flags |= p->mprops.memoryTypes[index].propertyFlags;
+
+    return 0;
+}
+
+/* AVBuffer free callback for pool frames: destroys the image first, then
+ * releases every backing memory allocation (one per plane when disjoint).
+ * opaque is the AVVulkanDeviceContext the frame was created with. */
+static void vulkan_frame_free(void *opaque, uint8_t *data)
+{
+    AVVkFrame *f = (AVVkFrame *)data;
+    AVVulkanDeviceContext *hwctx = opaque;
+
+    if (!f)
+        return;
+
+    vkDestroyImage(hwctx->act_dev, f->img, hwctx->alloc);
+    for (int i = 0; i < f->mem_count; i++)
+        vkFreeMemory(hwctx->act_dev, f->mem[i], hwctx->alloc);
+
+    av_free(f);
+}
+
+/* Allocates and binds memory for every plane of f->img. alloc_pnext is an
+ * array of per-plane pNext chains (stride bytes apart) forwarded to
+ * VkMemoryAllocateInfo; honors the implementation's dedicated-allocation
+ * preference when EXT_DEDICATED_ALLOC is available. */
+static int alloc_bind_mem(AVHWDeviceContext *ctx, AVVkFrame *f,
+                          void *alloc_pnext, size_t alloc_pnext_stride)
+{
+    int err;
+    VkResult ret;
+    VkBindImageMemoryInfo bind_info[AV_NUM_DATA_POINTERS] = { { 0 } };
+    VkBindImagePlaneMemoryInfo bind_p_info[AV_NUM_DATA_POINTERS] = { { 0 } };
+
+    AVVulkanDeviceContext *hwctx = ctx->hwctx;
+    VulkanDevicePriv *p = ctx->internal->priv;
+
+    VK_LOAD_PFN(hwctx->inst, vkBindImageMemory2KHR);
+    VK_LOAD_PFN(hwctx->inst, vkGetImageMemoryRequirements2KHR);
+
+    /* mem_count > 1 only for disjoint images; then each plane gets its own
+     * requirements query and allocation */
+    for (int i = 0; i < f->mem_count; i++) {
+        int use_ded_mem;
+        VkImagePlaneMemoryRequirementsInfo plane_req = {
+            .sType       = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
+            .planeAspect = i == 0 ? VK_IMAGE_ASPECT_PLANE_0_BIT :
+                           i == 1 ? VK_IMAGE_ASPECT_PLANE_1_BIT :
+                                    VK_IMAGE_ASPECT_PLANE_2_BIT,
+        };
+        VkImageMemoryRequirementsInfo2 req_desc = {
+            .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
+            .pNext = f->mem_count > 1 ? &plane_req : NULL,
+            .image = f->img,
+        };
+        VkMemoryDedicatedAllocateInfo ded_alloc = {
+            .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
+            /* Chain the caller-supplied per-plane pNext behind ours */
+            .pNext = (void *)(((uint8_t *)alloc_pnext) + i*alloc_pnext_stride),
+        };
+        VkMemoryDedicatedRequirements ded_req = {
+            .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
+        };
+        VkMemoryRequirements2 req = {
+            .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
+            .pNext = (p->extensions & EXT_DEDICATED_ALLOC) ? &ded_req : NULL,
+        };
+
+        pfn_vkGetImageMemoryRequirements2KHR(hwctx->act_dev, &req_desc, &req);
+
+        /* In case the implementation prefers/requires dedicated allocation */
+        use_ded_mem = ded_req.prefersDedicatedAllocation |
+                      ded_req.requiresDedicatedAllocation;
+        if (use_ded_mem)
+            ded_alloc.image = f->img;
+
+        /* Allocate memory; linear tiling implies host mapping, so request
+         * host-visible memory in that case */
+        if ((err = alloc_mem(ctx, &req.memoryRequirements,
+                             f->tiling == VK_IMAGE_TILING_LINEAR ?
+                             VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT :
+                             VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
+                             use_ded_mem ? &ded_alloc : (void *)ded_alloc.pNext,
+                             &f->flags, &f->mem[i])))
+            return err;
+
+        if (f->mem_count > 1) {
+            bind_p_info[i].sType       = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO;
+            bind_p_info[i].planeAspect = plane_req.planeAspect;
+            bind_info[i].pNext         = &bind_p_info[i];
+        }
+
+        bind_info[i].sType  = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
+        bind_info[i].image  = f->img;
+        bind_info[i].memory = f->mem[i];
+    }
+
+    /* Bind the allocated memory to the image */
+    ret = pfn_vkBindImageMemory2KHR(hwctx->act_dev, f->mem_count, bind_info);
+    if (ret != VK_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to bind memory: %s\n",
+               vk_ret2str(ret));
+        return AVERROR_EXTERNAL;
+    }
+
+    return 0;
+}
+
+/* Creates a fully allocated AVVkFrame for hwfc's sw_format/dimensions:
+ * image creation, memory allocation and binding. create_pnext is chained
+ * into VkImageCreateInfo, alloc_pnext/alloc_pnext_stride describe per-plane
+ * allocation chains (see alloc_bind_mem). On success *frame is owned by the
+ * caller and must be released with vulkan_frame_free(). */
+static int create_frame(AVHWFramesContext *hwfc, AVVkFrame **frame,
+                        VkImageTiling tiling, VkImageUsageFlagBits usage,
+                        int disjoint, void *create_pnext, void *alloc_pnext,
+                        size_t alloc_pnext_stride)
+{
+    int err;
+    VkResult ret;
+    AVHWDeviceContext *ctx = hwfc->device_ctx;
+    enum AVPixelFormat format = hwfc->sw_format;
+    VkFormat img_fmt = av_vkfmt_from_pixfmt(format);
+    const int planes = av_pix_fmt_count_planes(format);
+
+    /* Allocated */
+    AVVkFrame *f = NULL;
+
+    /* Contexts */
+    AVVulkanDeviceContext *hwctx = ctx->hwctx;
+    VulkanDevicePriv *p = ctx->internal->priv;
+
+    /* Image properties */
+    VkFormat possible_fmts[2];
+    VkImageFormatListCreateInfoKHR img_fmt_list = {
+        .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR,
+        .pNext = create_pnext,
+        .pViewFormats = possible_fmts,
+        .viewFormatCount = 1,
+    };
+    VkImageCreateInfo image_create_info = {
+        .sType         = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+        .pNext         = create_pnext,
+        .imageType     = VK_IMAGE_TYPE_2D,
+        .format        = img_fmt,
+        .extent.width  = hwfc->width,
+        .extent.height = hwfc->height,
+        .extent.depth  = 1,
+        .mipLevels     = 1,
+        .arrayLayers   = 1,
+        /* MUTABLE_FORMAT + EXTENDED_USAGE allow per-plane views later */
+        .flags         = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT |
+                         VK_IMAGE_CREATE_EXTENDED_USAGE_BIT |
+                         (disjoint ? VK_IMAGE_CREATE_DISJOINT_BIT : 0),
+        .tiling        = tiling,
+        /* PREINITIALIZED keeps host-written contents of linear images valid */
+        .initialLayout = tiling == VK_IMAGE_TILING_LINEAR ?
+                         VK_IMAGE_LAYOUT_PREINITIALIZED :
+                         VK_IMAGE_LAYOUT_UNDEFINED,
+        .usage         = usage,
+        .sharingMode   = VK_SHARING_MODE_EXCLUSIVE,
+        .samples       = VK_SAMPLE_COUNT_1_BIT,
+    };
+
+    if (img_fmt == VK_FORMAT_UNDEFINED) {
+        av_log(ctx, AV_LOG_ERROR, "Unsupported image format!\n");
+        return AVERROR(EINVAL);
+    }
+
+    f = av_mallocz(sizeof(*f));
+    if (!f) {
+        av_log(ctx, AV_LOG_ERROR, "Unable to allocate memory for AVVkFrame!\n");
+        err = AVERROR(ENOMEM);
+        goto fail;
+    }
+
+    /* Needed */
+    f->flags     = 0x0;
+    f->mem_count = disjoint ? planes : 1;
+    f->tiling    = image_create_info.tiling;
+    f->layout    = image_create_info.initialLayout;
+    f->access    = 0x0;
+
+    possible_fmts[0] = image_create_info.format;
+    /* Mark the formats that a VkImageView can be made of if supported */
+    if ((planes > 1) && (p->extensions & EXT_IMAGE_FORMAT_LIST)) {
+        const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(format);
+        switch (desc->comp[0].depth) {
+        case  8: possible_fmts[1] = VK_FORMAT_R8_UNORM;          break;
+        case 10: possible_fmts[1] = VK_FORMAT_R10X6_UNORM_PACK16; break;
+        case 12: possible_fmts[1] = VK_FORMAT_R12X4_UNORM_PACK16; break;
+        case 16: possible_fmts[1] = VK_FORMAT_R16_UNORM;          break;
+        }
+        img_fmt_list.viewFormatCount++;
+        /* Insert the format list between the image info and create_pnext */
+        image_create_info.pNext = &img_fmt_list;
+    }
+
+    /* Create the image */
+    ret = vkCreateImage(hwctx->act_dev, &image_create_info,
+                        hwctx->alloc, &f->img);
+    if (ret != VK_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Image creation failure: %s\n",
+               vk_ret2str(ret));
+        err = AVERROR(EINVAL);
+        goto fail;
+    }
+
+    if ((err = alloc_bind_mem(ctx, f, alloc_pnext, alloc_pnext_stride)))
+        goto fail;
+
+    *frame = f;
+    return 0;
+
+fail:
+    /* Safe on partially constructed frames (handles NULL members) */
+    vulkan_frame_free(hwctx, (uint8_t *)f);
+    return err;
+}
+
+/* Checks if an export flag is enabled, and if it is ORs it with *iexp.
+ * Probes the physical device with the same create flags/usage/tiling the
+ * frames context will use, so the answer matches actual image creation. */
+static void try_export_flags(AVHWFramesContext *hwfc,
+                             VkExternalMemoryHandleTypeFlagBits *iexp,
+                             VkExternalMemoryHandleTypeFlagBits exp)
+{
+    VkResult ret;
+    AVVulkanFramesContext *hwctx = hwfc->hwctx;
+    AVVulkanDeviceContext *dev_hwctx = hwfc->device_ctx->hwctx;
+    VK_LOAD_PFN(dev_hwctx->inst, vkGetPhysicalDeviceImageFormatProperties2);
+    VkExternalImageFormatProperties eprops = {
+        .sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR,
+    };
+    VkImageFormatProperties2 props = {
+        .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
+        .pNext = &eprops,
+    };
+    VkPhysicalDeviceExternalImageFormatInfo enext = {
+        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
+        .handleType = exp,
+    };
+    VkPhysicalDeviceImageFormatInfo2 pinfo = {
+        .sType  = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
+        .pNext  = &enext,
+        .format = av_vkfmt_from_pixfmt(hwfc->sw_format),
+        .type   = VK_IMAGE_TYPE_2D,
+        .tiling = hwctx->tiling,
+        .usage  = hwctx->usage,
+        /* Must mirror the flags used in create_frame() */
+        .flags  = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT |
+                  VK_IMAGE_CREATE_EXTENDED_USAGE_BIT |
+                  (hwctx->disjoint ? VK_IMAGE_CREATE_DISJOINT_BIT : 0),
+    };
+    /* Failure simply means the handle type is unsupported; not an error */
+    ret = pfn_vkGetPhysicalDeviceImageFormatProperties2(dev_hwctx->phys_dev,
+                                                        &pinfo, &props);
+    if (ret == VK_SUCCESS)
+        *iexp |= exp;
+}
+
+/* Buffer-pool allocator callback: creates one AVVkFrame wrapped in an
+ * AVBufferRef, requesting dma-buf exportability for each plane when the
+ * device supports it. Returns NULL on failure (pool API contract). */
+static AVBufferRef *vulkan_pool_alloc(void *opaque, int size)
+{
+    int err;
+    AVVkFrame *f;
+    AVBufferRef *avbuf = NULL;
+    AVHWFramesContext *hwfc = opaque;
+    AVVulkanFramesContext *hwctx = hwfc->hwctx;
+    VkExportMemoryAllocateInfo einfo[AV_NUM_DATA_POINTERS];
+    VkExternalMemoryHandleTypeFlags e = 0x0;
+
+    /* e stays 0 if dma-buf export is unsupported; einfo is still chained
+     * but with empty handleTypes */
+    try_export_flags(hwfc, &e, VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
+
+    for (int i = 0; i < av_pix_fmt_count_planes(hwfc->sw_format); i++) {
+        einfo[i].sType       = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO;
+        einfo[i].pNext       = hwctx->alloc_pnext[i];
+        einfo[i].handleTypes = e;
+    }
+
+    err = create_frame(hwfc, &f, hwctx->tiling, hwctx->usage,
+                       hwctx->disjoint, hwctx->create_pnext,
+                       einfo, sizeof(*einfo));
+    if (err)
+        return NULL;
+
+    avbuf = av_buffer_create((uint8_t *)f, sizeof(AVVkFrame),
+                             vulkan_frame_free, hwfc->device_ctx->hwctx, 0);
+    if (!avbuf) {
+        vulkan_frame_free(hwfc->device_ctx->hwctx, (uint8_t *)f);
+        return NULL;
+    }
+
+    return avbuf;
+}
+
+/* Frames-context init: fills in default tiling/usage/disjoint settings,
+ * does a trial allocation to fail early, then sets up the internal pool.
+ * A user-supplied pool (hwfc->pool) bypasses all of this. */
+static int vulkan_frames_init(AVHWFramesContext *hwfc)
+{
+    int err = 0;
+    AVVkFrame *f;
+    AVVulkanFramesContext *hwctx = hwfc->hwctx;
+    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
+
+    if (hwfc->pool)
+        return 0;
+
+    /* Default pool flags */
+    hwctx->tiling = hwctx->tiling ? hwctx->tiling : p->use_linear_images ?
+                    VK_IMAGE_TILING_LINEAR : VK_IMAGE_TILING_OPTIMAL;
+
+    hwctx->usage |= DEFAULT_USAGE_FLAGS;
+
+    hwctx->disjoint = hwctx->disjoint ? hwctx->disjoint : p->use_disjoint_images;
+
+    /* Test to see if allocation will fail */
+    err = create_frame(hwfc, &f, hwctx->tiling, hwctx->usage, hwctx->disjoint,
+                       hwctx->create_pnext, NULL, 0);
+    if (err)
+        return err;
+
+    /* The test frame is only needed to validate the settings */
+    vulkan_frame_free(hwfc->device_ctx->hwctx, (uint8_t *)f);
+
+    hwfc->internal->pool_internal = av_buffer_pool_init2(sizeof(AVVkFrame),
+                                                         hwfc, vulkan_pool_alloc,
+                                                         NULL);
+    if (!hwfc->internal->pool_internal)
+        return AVERROR(ENOMEM);
+
+    return 0;
+}
+
+/* Hands out a frame from the pool; data[0] carries the AVVkFrame pointer. */
+static int vulkan_get_buffer(AVHWFramesContext *hwfc, AVFrame *frame)
+{
+    AVBufferRef *ref = av_buffer_pool_get(hwfc->pool);
+
+    if (!ref)
+        return AVERROR(ENOMEM);
+
+    frame->buf[0]  = ref;
+    frame->data[0] = ref->data;
+    frame->format  = AV_PIX_FMT_VULKAN;
+    frame->width   = hwfc->width;
+    frame->height  = hwfc->height;
+
+    return 0;
+}
+
+/* Lists the sw formats usable for transfers in either direction: the
+ * frame's own sw format, plus a matching grayscale format for formats
+ * with a luma plane (allows transferring just that plane). */
+static int vulkan_transfer_get_formats(AVHWFramesContext *hwfc,
+                                       enum AVHWFrameTransferDirection dir,
+                                       enum AVPixelFormat **formats)
+{
+    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(hwfc->sw_format);
+    const int has_luma = !(desc->flags & AV_PIX_FMT_FLAG_RGB);
+    enum AVPixelFormat gray = AV_PIX_FMT_NONE;
+    enum AVPixelFormat *fmts;
+    int nb = 0;
+
+    if (has_luma) {
+        switch (desc->comp[0].depth) {
+        case  8: gray = AV_PIX_FMT_GRAY8;  break;
+        case 10: gray = AV_PIX_FMT_GRAY10; break;
+        case 12: gray = AV_PIX_FMT_GRAY12; break;
+        case 16: gray = AV_PIX_FMT_GRAY16; break;
+        }
+    }
+
+    /* sw_format + optional gray + AV_PIX_FMT_NONE terminator */
+    fmts = av_malloc((2 + has_luma) * sizeof(*fmts));
+    if (!fmts)
+        return AVERROR(ENOMEM);
+
+    fmts[nb++] = hwfc->sw_format;
+    if (gray != AV_PIX_FMT_NONE)
+        fmts[nb++] = gray;
+    fmts[nb++] = AV_PIX_FMT_NONE;
+
+    *formats = fmts;
+
+    return 0;
+}
+
+/* Private state for host mappings of an AVVkFrame */
+typedef struct VulkanMapping {
+    AVVkFrame *frame;  /* mapped frame (not owned unless noted by the unmap fn) */
+    int flags;         /* AV_HWFRAME_MAP_* flags the mapping was made with */
+} VulkanMapping;
+
+/* Unmap callback for vulkan_map_frame(): flushes written non-coherent memory
+ * back to the device, unmaps every allocation and frees the mapping state. */
+static void vulkan_unmap_frame(AVHWFramesContext *hwfc, HWMapDescriptor *hwmap)
+{
+    VulkanMapping *map = hwmap->priv;
+    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
+
+    /* Check if buffer needs flushing */
+    if ((map->flags & AV_HWFRAME_MAP_WRITE) &&
+        !(map->frame->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) {
+        VkResult ret;
+        VkMappedMemoryRange flush_ranges[AV_NUM_DATA_POINTERS] = { { 0 } };
+
+        for (int i = 0; i < map->frame->mem_count; i++) {
+            flush_ranges[i].sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
+            flush_ranges[i].memory = map->frame->mem[i];
+            flush_ranges[i].size   = VK_WHOLE_SIZE;
+        }
+
+        ret = vkFlushMappedMemoryRanges(hwctx->act_dev, map->frame->mem_count,
+                                        flush_ranges);
+        /* Nothing sensible to do on failure here; just report it */
+        if (ret != VK_SUCCESS) {
+            av_log(hwfc, AV_LOG_ERROR, "Failed to flush memory: %s\n",
+                   vk_ret2str(ret));
+        }
+    }
+
+    for (int i = 0; i < map->frame->mem_count; i++)
+        vkUnmapMemory(hwctx->act_dev, map->frame->mem[i]);
+
+    av_free(map);
+}
+
+/* Maps a linear, host-visible AVVkFrame into host memory, filling dst's
+ * data/linesize per plane. Invalidates non-coherent memory when the caller
+ * may read existing contents. Registers vulkan_unmap_frame() for cleanup.
+ * Fix: allocation failure now returns AVERROR(ENOMEM), not EINVAL. */
+static int vulkan_map_frame(AVHWFramesContext *hwfc, AVFrame *dst,
+                            const AVFrame *src, int flags)
+{
+    VkResult ret;
+    int err, mapped_mem_count = 0;
+    AVVkFrame *f = (AVVkFrame *)src->data[0];
+    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
+    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
+
+    VulkanMapping *map = av_mallocz(sizeof(VulkanMapping));
+    if (!map)
+        return AVERROR(ENOMEM);
+
+    if (src->format != AV_PIX_FMT_VULKAN) {
+        av_log(hwfc, AV_LOG_ERROR, "Cannot map from pixel format %s!\n",
+               av_get_pix_fmt_name(src->format));
+        err = AVERROR(EINVAL);
+        goto fail;
+    }
+
+    /* Only linear, host-visible images can be mapped directly */
+    if (!(f->flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) ||
+        !(f->tiling == VK_IMAGE_TILING_LINEAR)) {
+        av_log(hwfc, AV_LOG_ERROR, "Unable to map frame, not host visible "
+               "and linear!\n");
+        err = AVERROR(EINVAL);
+        goto fail;
+    }
+
+    dst->width  = src->width;
+    dst->height = src->height;
+
+    for (int i = 0; i < f->mem_count; i++) {
+        ret = vkMapMemory(hwctx->act_dev, f->mem[i], 0,
+                          VK_WHOLE_SIZE, 0, (void **)&dst->data[i]);
+        if (ret != VK_SUCCESS) {
+            av_log(hwfc, AV_LOG_ERROR, "Failed to map image memory: %s\n",
+                   vk_ret2str(ret));
+            err = AVERROR_EXTERNAL;
+            goto fail;
+        }
+        mapped_mem_count++;
+    }
+
+    /* For non disjoint memory duplicate them */
+    if (f->mem_count == 1)
+        for (int i = 1; i < planes; i++)
+            dst->data[i] = dst->data[0];
+
+    /* Check if the memory contents matter: reads, or writes that don't
+     * overwrite everything, need the device's view of the memory */
+    if (((flags & AV_HWFRAME_MAP_READ) || !(flags & AV_HWFRAME_MAP_OVERWRITE)) &&
+        !(f->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) {
+        VkMappedMemoryRange map_mem_ranges[AV_NUM_DATA_POINTERS] = { { 0 } };
+        for (int i = 0; i < f->mem_count; i++) {
+            map_mem_ranges[i].sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
+            map_mem_ranges[i].size   = VK_WHOLE_SIZE;
+            map_mem_ranges[i].memory = f->mem[i];
+        }
+
+        ret = vkInvalidateMappedMemoryRanges(hwctx->act_dev, f->mem_count,
+                                             map_mem_ranges);
+        if (ret != VK_SUCCESS) {
+            av_log(hwfc, AV_LOG_ERROR, "Failed to invalidate memory: %s\n",
+                   vk_ret2str(ret));
+            err = AVERROR_EXTERNAL;
+            goto fail;
+        }
+    }
+
+    /* Offset each plane pointer and query the row pitch */
+    for (int i = 0; i < planes; i++) {
+        VkImageSubresource sub = {
+            .aspectMask = planes < 2 ? VK_IMAGE_ASPECT_COLOR_BIT :
+                          i == 0 ? VK_IMAGE_ASPECT_PLANE_0_BIT :
+                          i == 1 ? VK_IMAGE_ASPECT_PLANE_1_BIT :
+                                   VK_IMAGE_ASPECT_PLANE_2_BIT,
+        };
+        VkSubresourceLayout layout;
+        vkGetImageSubresourceLayout(hwctx->act_dev, f->img, &sub, &layout);
+        dst->data[i] += layout.offset;
+        dst->linesize[i] = layout.rowPitch;
+    }
+
+    map->frame = f;
+    map->flags = flags;
+
+    err = ff_hwframe_map_create(src->hw_frames_ctx, dst, src,
+                                &vulkan_unmap_frame, map);
+    if (err < 0)
+        goto fail;
+
+    return 0;
+
+fail:
+    for (int i = 0; i < mapped_mem_count; i++)
+        vkUnmapMemory(hwctx->act_dev, f->mem[i]);
+
+    av_free(map);
+    return err;
+}
+
+#if CONFIG_LIBDRM
+/* Unmap callback for frames imported from DRM: the AVVkFrame is owned by
+ * the mapping, so destroy the image, free its memory and the frame itself.
+ * Fix: the VulkanMapping struct was previously leaked — only map->frame
+ * was freed, never the mapping allocated by vulkan_map_from_drm(). */
+static void vulkan_unmap_from(AVHWFramesContext *hwfc, HWMapDescriptor *hwmap)
+{
+    VulkanMapping *map = hwmap->priv;
+    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
+
+    vkDestroyImage(hwctx->act_dev, map->frame->img, hwctx->alloc);
+    for (int i = 0; i < map->frame->mem_count; i++)
+        vkFreeMemory(hwctx->act_dev, map->frame->mem[i], hwctx->alloc);
+
+    av_freep(&map->frame);
+    av_free(map);
+}
+
+/* Imports the dma-buf fds of a DRM frame descriptor into a new AVVkFrame.
+ * Fix: the VkImageDrmFormatModifierListCreateInfoEXT struct was tagged with
+ * VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT,
+ * which belongs to a different struct — drivers validating sType would
+ * reject or ignore the modifier list. */
+static int vulkan_map_from_drm_frame_desc(AVHWFramesContext *hwfc, AVVkFrame **f,
+                                          AVDRMFrameDescriptor *desc)
+{
+    int err = 0;
+
+    /* Destination frame */
+#if HAVE_VULKAN_DRM_MOD
+    uint64_t modifier_buf[AV_NUM_DATA_POINTERS];
+    VkImageDrmFormatModifierListCreateInfoEXT drm_mod = {
+        .sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT,
+    };
+#endif
+    VkExternalMemoryImageCreateInfo ext_info = {
+        .sType       = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
+#if HAVE_VULKAN_DRM_MOD
+        .pNext       = &drm_mod,
+#endif
+        .handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
+    };
+    VkImportMemoryFdInfoKHR import_desc[AV_NUM_DATA_POINTERS];
+
+    /* Multiple objects are only importable as a disjoint image, which
+     * requires one object per plane */
+    if ((desc->nb_objects > 1) &&
+        (desc->nb_objects != av_pix_fmt_count_planes(hwfc->sw_format))) {
+        av_log(hwfc, AV_LOG_ERROR, "Number of DRM objects doesn't match "
+               "plane count!\n");
+        return AVERROR(EINVAL);
+    }
+
+    for (int i = 0; i < desc->nb_objects; i++) {
+        import_desc[i].sType      = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR;
+        import_desc[i].pNext      = NULL;
+        import_desc[i].handleType = ext_info.handleTypes;
+        import_desc[i].fd         = desc->objects[i].fd;
+#if HAVE_VULKAN_DRM_MOD
+        modifier_buf[i] = desc->objects[i].format_modifier;
+        if (modifier_buf[i] == DRM_FORMAT_MOD_INVALID) {
+            av_log(hwfc, AV_LOG_ERROR, "DRM format modifier is invalid!\n");
+            return AVERROR(EINVAL);
+        }
+#endif
+    }
+#if HAVE_VULKAN_DRM_MOD
+    drm_mod.pDrmFormatModifiers    = modifier_buf;
+    drm_mod.drmFormatModifierCount = desc->nb_objects;
+#endif
+
+    err = create_frame(hwfc, f,
+#if HAVE_VULKAN_DRM_MOD
+                       VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT,
+#else
+                       /* Without the modifier extension only linear layouts
+                        * can be described reliably */
+                       desc->objects[0].format_modifier == DRM_FORMAT_MOD_LINEAR ?
+                       VK_IMAGE_TILING_LINEAR : VK_IMAGE_TILING_OPTIMAL,
+#endif
+                       DEFAULT_USAGE_FLAGS, desc->nb_objects > 1, &ext_info,
+                       import_desc, sizeof(*import_desc));
+    if (err < 0)
+        return err;
+
+    return 0;
+}
+
+/* Maps a DRM PRIME frame to a Vulkan frame by importing its dma-bufs.
+ * Fixes: f was used uninitialized on the first failure path (UB in
+ * vulkan_frame_free), and a failed av_mallocz fell through with err == 0,
+ * returning success while leaking the imported frame. */
+static int vulkan_map_from_drm(AVHWFramesContext *hwfc, AVFrame *dst,
+                               const AVFrame *src, int flags)
+{
+    int err = 0;
+    AVVkFrame *f = NULL;
+    VulkanMapping *map = NULL;
+
+    err = vulkan_map_from_drm_frame_desc(hwfc, &f,
+                                         (AVDRMFrameDescriptor *)src->data[0]);
+    if (err)
+        goto fail;
+
+    /* The unmapping function will free this */
+    dst->data[0] = (uint8_t *)f;
+    dst->width   = src->width;
+    dst->height  = src->height;
+
+    map = av_mallocz(sizeof(VulkanMapping));
+    if (!map) {
+        err = AVERROR(ENOMEM);
+        goto fail;
+    }
+
+    map->frame = f;
+    map->flags = flags;
+
+    err = ff_hwframe_map_create(dst->hw_frames_ctx, dst, src,
+                                &vulkan_unmap_from, map);
+    if (err < 0)
+        goto fail;
+
+    av_log(hwfc, AV_LOG_DEBUG, "Mapped DRM object to Vulkan!\n");
+
+    return 0;
+
+fail:
+    /* vulkan_frame_free() is a no-op when f is still NULL */
+    vulkan_frame_free(hwfc->device_ctx->hwctx, (uint8_t *)f);
+    av_free(map);
+    return err;
+}
+
+#if CONFIG_VAAPI
+/* Maps a VAAPI frame to Vulkan by going through DRM PRIME as an
+ * intermediate, then collapsing the two-level mapping so dst depends
+ * directly on src. */
+static int vulkan_map_from_vaapi(AVHWFramesContext *dst_fc,
+                                 AVFrame *dst, const AVFrame *src,
+                                 int flags)
+{
+    int err;
+    AVFrame *tmp = av_frame_alloc();
+    if (!tmp)
+        return AVERROR(ENOMEM);
+
+    tmp->format = AV_PIX_FMT_DRM_PRIME;
+
+    /* VAAPI -> DRM PRIME */
+    err = av_hwframe_map(tmp, src, flags);
+    if (err < 0)
+        goto fail;
+
+    /* DRM PRIME -> Vulkan */
+    err = vulkan_map_from_drm(dst_fc, dst, tmp, flags);
+    if (err < 0)
+        goto fail;
+
+    /* Make dst reference src directly instead of the temporary */
+    err = ff_hwframe_map_replace(dst, src);
+
+fail:
+    av_frame_free(&tmp);
+    return err;
+}
+#endif
+#endif
+
+/* hwcontext map_to entry point: dispatches import of foreign frames
+ * (DRM PRIME, and VAAPI via DRM) into Vulkan. */
+static int vulkan_map_to(AVHWFramesContext *hwfc, AVFrame *dst,
+                         const AVFrame *src, int flags)
+{
+    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
+
+    if (!(p->extensions & EXT_EXTERNAL_MEMORY)) {
+        av_log(hwfc, AV_LOG_ERROR, "Cannot import any external memory, "
+               "VK_KHR_external_memory is unsupported!\n");
+        return AVERROR(ENOSYS);
+    }
+
+    switch (src->format) {
+#if CONFIG_LIBDRM
+#if CONFIG_VAAPI
+    case AV_PIX_FMT_VAAPI:
+        if (p->extensions & EXT_DRM_MODIFIER_FLAGS)
+            return vulkan_map_from_vaapi(hwfc, dst, src, flags);
+        /* Fallthrough - same extension check fails below, ends at default */
+#endif
+    case AV_PIX_FMT_DRM_PRIME:
+        if (p->extensions & EXT_DRM_MODIFIER_FLAGS)
+            return vulkan_map_from_drm(hwfc, dst, src, flags);
+        /* Fallthrough */
+#endif
+    default:
+        return AVERROR(ENOSYS);
+    }
+}
+
+#if CONFIG_LIBDRM
+/* Private state for Vulkan -> DRM mappings.
+ * NOTE(review): only drm_desc appears to be used by the map/unmap path
+ * visible here; confirm whether `source` is needed elsewhere. */
+typedef struct VulkanDRMMapping {
+    AVDRMFrameDescriptor drm_desc;  /* exported descriptor handed to dst */
+    AVVkFrame *source;              /* originating Vulkan frame */
+} VulkanDRMMapping;
+
+/* Unmap callback for Vulkan -> DRM mappings: closes every exported
+ * dma-buf fd and releases the descriptor. */
+static void vulkan_unmap_to_drm(AVHWFramesContext *hwfc, HWMapDescriptor *hwmap)
+{
+    AVDRMFrameDescriptor *desc = hwmap->priv;
+    int i;
+
+    for (i = 0; i < desc->nb_objects; i++)
+        close(desc->objects[i].fd);
+
+    av_free(desc);
+}
+
+/* Maps AVPixelFormat to the matching DRM fourcc (0 = unsupported).
+ * Fix: declared static const — it was a mutable global with external
+ * linkage, polluting the library namespace and writable at runtime.
+ * NOTE(review): YUV420P (U before V) maps to DRM_FORMAT_YVU420 (V before
+ * U) — confirm the intended plane order. */
+static const uint32_t drm_format_map[AV_PIX_FMT_NB] = {
+    [AV_PIX_FMT_NV12]    = DRM_FORMAT_NV12,
+    [AV_PIX_FMT_YUV420P] = DRM_FORMAT_YVU420,
+};
+
+/* Exports an AVVkFrame as a DRM frame descriptor (dma-buf fds + layout).
+ * Fixes: (1) plane aspectMask selected by the layer index i instead of the
+ * plane index j, so planes 1/2 queried plane 0's layout; (2) `disjoint`
+ * was nb_objects (always truthy), giving wrong object_index for
+ * single-object frames; (3) ff_hwframe_map_create() was registered before
+ * the descriptor was filled, so any later failure both double-freed
+ * drm_desc (unmap callback + explicit free) and leaked exported fds — it
+ * is now registered last, and the failure path closes the fds itself. */
+static int vulkan_map_to_drm(AVHWFramesContext *hwfc, AVFrame *dst,
+                             const AVFrame *src, int flags)
+{
+    int err = 0;
+    int nb_exported = 0;
+    VkResult ret;
+    AVVkFrame *f = (AVVkFrame *)src->data[0];
+    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
+    VK_LOAD_PFN(hwctx->inst, vkGetMemoryFdKHR);
+#if HAVE_VULKAN_DRM_MOD
+    VkImageDrmFormatModifierPropertiesEXT drm_mod = {
+        .sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
+    };
+#endif
+
+    AVDRMFrameDescriptor *drm_desc = av_mallocz(sizeof(*drm_desc));
+    if (!drm_desc)
+        return AVERROR(ENOMEM);
+
+#if HAVE_VULKAN_DRM_MOD
+    ret = vkGetImageDrmFormatModifierPropertiesEXT(hwctx->act_dev, f->img,
+                                                   &drm_mod);
+    if (ret != VK_SUCCESS) {
+        av_log(hwfc, AV_LOG_ERROR, "Failed to retrieve DRM format modifier!\n");
+        err = AVERROR_EXTERNAL;
+        goto fail;
+    }
+#endif
+
+    drm_desc->nb_objects = f->mem_count;
+    for (int i = 0; i < drm_desc->nb_objects; i++) {
+        VkMemoryGetFdInfoKHR export_info = {
+            .sType      = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
+            .memory     = f->mem[i],
+            .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
+        };
+
+        ret = pfn_vkGetMemoryFdKHR(hwctx->act_dev, &export_info,
+                                   &drm_desc->objects[i].fd);
+        if (ret != VK_SUCCESS) {
+            av_log(hwfc, AV_LOG_ERROR, "Unable to export the image as a FD!\n");
+            err = AVERROR_EXTERNAL;
+            goto fail;
+        }
+        nb_exported++;
+
+        drm_desc->objects[i].size = lseek(drm_desc->objects[i].fd, 0, SEEK_END);
+#if HAVE_VULKAN_DRM_MOD
+        drm_desc->objects[i].format_modifier = drm_mod.drmFormatModifier;
+#endif
+    }
+
+    drm_desc->nb_layers = 1;
+    for (int i = 0; i < drm_desc->nb_layers; i++) {
+        /* Disjoint only if each plane lives in its own memory object */
+        const int disjoint = drm_desc->nb_objects > 1;
+        drm_desc->layers[i].format    = drm_format_map[hwfc->sw_format];
+        drm_desc->layers[i].nb_planes = av_pix_fmt_count_planes(hwfc->sw_format);
+
+        if (!drm_desc->layers[i].format) {
+            av_log(hwfc, AV_LOG_ERROR, "Cannot map to DRM layer, unsupported!\n");
+            err = AVERROR_PATCHWELCOME;
+            goto fail;
+        }
+
+        for (int j = 0; j < drm_desc->layers[i].nb_planes; j++) {
+            const int nb_planes = drm_desc->layers[i].nb_planes;
+            VkSubresourceLayout layout;
+            VkImageSubresource sub = {
+                .aspectMask = nb_planes < 2 ? VK_IMAGE_ASPECT_COLOR_BIT :
+                              j == 0 ? VK_IMAGE_ASPECT_PLANE_0_BIT :
+                              j == 1 ? VK_IMAGE_ASPECT_PLANE_1_BIT :
+                                       VK_IMAGE_ASPECT_PLANE_2_BIT,
+            };
+            /* Subresource layouts can only be queried for linear images;
+             * offsets/pitches stay zero otherwise */
+            if (f->tiling != VK_IMAGE_TILING_LINEAR)
+                continue;
+            vkGetImageSubresourceLayout(hwctx->act_dev, f->img, &sub, &layout);
+            drm_desc->layers[i].planes[j].object_index = disjoint ? j : 0;
+            drm_desc->layers[i].planes[j].offset       = layout.offset;
+            drm_desc->layers[i].planes[j].pitch        = layout.rowPitch;
+        }
+    }
+
+    /* Register the unmap callback last, once drm_desc is fully populated;
+     * from here on it owns drm_desc and the fds */
+    err = ff_hwframe_map_create(src->hw_frames_ctx, dst, src,
+                                &vulkan_unmap_to_drm, drm_desc);
+    if (err < 0)
+        goto fail;
+
+    dst->width   = src->width;
+    dst->height  = src->height;
+    dst->data[0] = (uint8_t *)drm_desc;
+
+    av_log(hwfc, AV_LOG_DEBUG, "Mapped AVVkFrame to a DRM object!\n");
+
+    return 0;
+
+fail:
+    for (int i = 0; i < nb_exported; i++)
+        close(drm_desc->objects[i].fd);
+    av_free(drm_desc);
+    return err;
+}
+
+#if CONFIG_VAAPI
+/* Maps a Vulkan frame to VAAPI via an intermediate DRM PRIME frame,
+ * then collapses the mapping so dst depends directly on src. */
+static int vulkan_map_to_vaapi(AVHWFramesContext *hwfc, AVFrame *dst,
+                               const AVFrame *src, int flags)
+{
+    int err;
+    AVFrame *tmp = av_frame_alloc();
+    if (!tmp)
+        return AVERROR(ENOMEM);
+
+    tmp->format = AV_PIX_FMT_DRM_PRIME;
+
+    /* Vulkan -> DRM PRIME */
+    err = vulkan_map_to_drm(hwfc, tmp, src, flags);
+    if (err < 0)
+        goto fail;
+
+    /* DRM PRIME -> VAAPI */
+    err = av_hwframe_map(dst, tmp, flags);
+    if (err < 0)
+        goto fail;
+
+    /* Make dst reference src directly instead of the temporary */
+    err = ff_hwframe_map_replace(dst, src);
+
+fail:
+    av_frame_free(&tmp);
+    return err;
+}
+#endif
+#endif
+
+/* hwcontext map_from entry point: exports Vulkan frames to DRM PRIME
+ * (and VAAPI on top of it). */
+static int vulkan_map_from(AVHWFramesContext *hwfc, AVFrame *dst,
+                           const AVFrame *src, int flags)
+{
+    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
+
+    switch (dst->format) {
+#if CONFIG_LIBDRM
+    case AV_PIX_FMT_DRM_PRIME:
+        if (p->extensions & EXT_DRM_MODIFIER_FLAGS)
+            return vulkan_map_to_drm(hwfc, dst, src, flags);
+        /* Fallthrough - ends at default when the extension is missing */
+#if CONFIG_VAAPI
+    case AV_PIX_FMT_VAAPI:
+        if (p->extensions & EXT_DRM_MODIFIER_FLAGS)
+            return vulkan_map_to_vaapi(hwfc, dst, src, flags);
+        /* Fallthrough */
+#endif
+#endif
+    default:
+        return AVERROR(ENOSYS);
+    }
+}
+
+/* A staging buffer used for image <-> host transfers */
+typedef struct ImageBuffer {
+    VkBuffer buf;                  /* buffer handle */
+    VkDeviceMemory mem;            /* backing allocation */
+    VkMemoryPropertyFlagBits flags; /* actual property flags of the memory */
+} ImageBuffer;
+
+/* Releases an ImageBuffer's handle and memory; tolerates NULL. */
+static void free_buf(AVHWDeviceContext *ctx, ImageBuffer *buf)
+{
+    AVVulkanDeviceContext *dev_hwctx = ctx->hwctx;
+
+    if (!buf)
+        return;
+
+    vkDestroyBuffer(dev_hwctx->act_dev, buf->buf, dev_hwctx->alloc);
+    vkFreeMemory(dev_hwctx->act_dev, buf->mem, dev_hwctx->alloc);
+}
+
+/* Creates an ImageBuffer: buffer object, memory allocation and binding.
+ * Fixes: vkCreateBuffer was called with a NULL allocator while free_buf()
+ * destroys with hwctx->alloc — mismatched VkAllocationCallbacks are
+ * invalid; and the buffer leaked when alloc_mem() failed. */
+static int create_buf(AVHWDeviceContext *ctx, ImageBuffer *buf, size_t size,
+                      VkBufferUsageFlags usage, VkMemoryPropertyFlagBits flags,
+                      void *create_pnext, void *alloc_pnext)
+{
+    int err;
+    VkResult ret;
+    VkMemoryRequirements req;
+    AVVulkanDeviceContext *hwctx = ctx->hwctx;
+
+    VkBufferCreateInfo buf_spawn = {
+        .sType       = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+        .pNext       = create_pnext,
+        .usage       = usage,
+        .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
+        .size        = size, /* Gets FFALIGNED during alloc if host visible
+                                but should be ok */
+    };
+
+    /* Use the same allocator that free_buf() destroys with */
+    ret = vkCreateBuffer(hwctx->act_dev, &buf_spawn, hwctx->alloc, &buf->buf);
+    if (ret != VK_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to create buffer: %s\n",
+               vk_ret2str(ret));
+        return AVERROR_EXTERNAL;
+    }
+
+    vkGetBufferMemoryRequirements(hwctx->act_dev, buf->buf, &req);
+
+    err = alloc_mem(ctx, &req, flags, alloc_pnext, &buf->flags, &buf->mem);
+    if (err) {
+        /* Don't leak the buffer when the allocation fails */
+        vkDestroyBuffer(hwctx->act_dev, buf->buf, hwctx->alloc);
+        return err;
+    }
+
+    ret = vkBindBufferMemory(hwctx->act_dev, buf->buf, buf->mem, 0);
+    if (ret != VK_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to bind memory to buffer: %s\n",
+               vk_ret2str(ret));
+        free_buf(ctx, buf);
+        return AVERROR_EXTERNAL;
+    }
+
+    return 0;
+}
+
+/* Maps nb_buffers staging buffers into host memory (pointers in mem[]),
+ * optionally invalidating non-coherent memory so the host sees current
+ * device writes. Fix: a partial vkMapMemory failure previously left the
+ * already-mapped buffers mapped forever; they are now unmapped. */
+static int map_buffers(AVHWDeviceContext *ctx, ImageBuffer *buf, uint8_t *mem[],
+                       int nb_buffers, int invalidate)
+{
+    VkResult ret;
+    AVVulkanDeviceContext *hwctx = ctx->hwctx;
+    VkMappedMemoryRange invalidate_ctx[AV_NUM_DATA_POINTERS];
+    int invalidate_count = 0;
+
+    for (int i = 0; i < nb_buffers; i++) {
+        ret = vkMapMemory(hwctx->act_dev, buf[i].mem, 0,
+                          VK_WHOLE_SIZE, 0, (void **)&mem[i]);
+        if (ret != VK_SUCCESS) {
+            av_log(ctx, AV_LOG_ERROR, "Failed to map buffer memory: %s\n",
+                   vk_ret2str(ret));
+            /* Undo the mappings that already succeeded */
+            for (int j = 0; j < i; j++)
+                vkUnmapMemory(hwctx->act_dev, buf[j].mem);
+            return AVERROR_EXTERNAL;
+        }
+    }
+
+    if (!invalidate)
+        return 0;
+
+    /* Coherent memory needs no invalidation */
+    for (int i = 0; i < nb_buffers; i++) {
+        const VkMappedMemoryRange ival_buf = {
+            .sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
+            .memory = buf[i].mem,
+            .size   = VK_WHOLE_SIZE,
+        };
+        if (buf[i].flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
+            continue;
+        invalidate_ctx[invalidate_count++] = ival_buf;
+    }
+
+    if (invalidate_count) {
+        ret = vkInvalidateMappedMemoryRanges(hwctx->act_dev, invalidate_count,
+                                             invalidate_ctx);
+        /* Non-fatal: the mappings remain usable, contents may be stale */
+        if (ret != VK_SUCCESS)
+            av_log(ctx, AV_LOG_WARNING, "Failed to invalidate memory: %s\n",
+                   vk_ret2str(ret));
+    }
+
+    return 0;
+}
+
+/* Unmaps nb_buffers staging buffers, optionally flushing non-coherent
+ * memory first so the device sees host writes. Always unmaps, even when
+ * the flush fails (the error is still returned). */
+static int unmap_buffers(AVHWDeviceContext *ctx, ImageBuffer *buf,
+                         int nb_buffers, int flush)
+{
+    int err = 0;
+    VkResult ret;
+    AVVulkanDeviceContext *hwctx = ctx->hwctx;
+    VkMappedMemoryRange flush_ctx[AV_NUM_DATA_POINTERS];
+    int flush_count = 0;
+
+    if (flush) {
+        /* Coherent memory needs no flushing */
+        for (int i = 0; i < nb_buffers; i++) {
+            const VkMappedMemoryRange flush_buf = {
+                .sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
+                .memory = buf[i].mem,
+                .size   = VK_WHOLE_SIZE,
+            };
+            if (buf[i].flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
+                continue;
+            flush_ctx[flush_count++] = flush_buf;
+        }
+    }
+
+    if (flush_count) {
+        ret = vkFlushMappedMemoryRanges(hwctx->act_dev, flush_count, flush_ctx);
+        if (ret != VK_SUCCESS) {
+            av_log(ctx, AV_LOG_ERROR, "Failed to flush memory: %s\n",
+                   vk_ret2str(ret));
+            err = AVERROR_EXTERNAL; /* We still want to try to unmap them */
+        }
+    }
+
+    for (int i = 0; i < nb_buffers; i++)
+        vkUnmapMemory(hwctx->act_dev, buf[i].mem);
+
+    return err;
+}
+
+/* Records and submits a command buffer that copies every plane between
+ * frame->img and the per-plane staging buffers: layout transition barrier,
+ * one vkCmdCopy* per plane, then submit and wait on the context fence.
+ * Synchronous: blocks until the transfer completes. */
+static int transfer_image_buf(AVHWDeviceContext *ctx, AVVkFrame *frame,
+                              ImageBuffer *buffer, const int *stride, int w,
+                              int h, enum AVPixelFormat pix_fmt, int to_buf)
+{
+    VkResult ret;
+    AVVulkanDeviceContext *hwctx = ctx->hwctx;
+    VulkanDevicePriv *s = ctx->internal->priv;
+
+    const int planes = av_pix_fmt_count_planes(pix_fmt);
+    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);
+
+    VkCommandBufferBeginInfo cmd_start = {
+        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+        .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
+    };
+
+    VkSubmitInfo s_info = {
+        .sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO,
+        .commandBufferCount   = 1,
+        .pCommandBuffers      = &s->cmd_buf,
+    };
+
+    ret = vkBeginCommandBuffer(s->cmd_buf, &cmd_start);
+    if (ret != VK_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Unable to init command buffer: %s\n",
+               vk_ret2str(ret));
+        return AVERROR_EXTERNAL;
+    }
+
+    { /* Change the image layout to something more optimal for transfers */
+        VkImageMemoryBarrier bar = {
+            .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+            .srcAccessMask = 0,
+            .dstAccessMask = to_buf ? VK_ACCESS_TRANSFER_READ_BIT :
+                                      VK_ACCESS_TRANSFER_WRITE_BIT,
+            .oldLayout = frame->layout,
+            .newLayout = to_buf ? VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL :
+                                  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+            .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+            .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+            .image = frame->img,
+            .subresourceRange.levelCount = 1,
+            .subresourceRange.layerCount = 1,
+        };
+
+        /* Barrier must cover every plane aspect of a multi-planar image */
+        if (planes == 1) {
+            bar.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+        } else {
+            bar.subresourceRange.aspectMask  = VK_IMAGE_ASPECT_PLANE_0_BIT;
+            bar.subresourceRange.aspectMask |= VK_IMAGE_ASPECT_PLANE_1_BIT;
+            if (planes > 2)
+                bar.subresourceRange.aspectMask |= VK_IMAGE_ASPECT_PLANE_2_BIT;
+        }
+
+        vkCmdPipelineBarrier(s->cmd_buf, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
+                             VK_PIPELINE_STAGE_TRANSFER_BIT,
+                             0, 0, NULL, 0, NULL, 1, &bar);
+
+        /* Update to the new layout */
+        frame->layout = bar.newLayout;
+        frame->access = bar.dstAccessMask;
+    }
+
+    /* Schedule a copy for each plane */
+    for (int i = 0; i < planes; i++) {
+        VkImageSubresourceLayers sub = {
+            .aspectMask = planes < 2 ? VK_IMAGE_ASPECT_COLOR_BIT :
+                          i == 0 ? VK_IMAGE_ASPECT_PLANE_0_BIT :
+                          i == 1 ? VK_IMAGE_ASPECT_PLANE_1_BIT :
+                                   VK_IMAGE_ASPECT_PLANE_2_BIT,
+            .layerCount = 1,
+        };
+        /* Chroma planes are subsampled */
+        const int p_w = i > 0 ? AV_CEIL_RSHIFT(w, desc->log2_chroma_w) : w;
+        const int p_h = i > 0 ? AV_CEIL_RSHIFT(h, desc->log2_chroma_h) : h;
+        VkBufferImageCopy buf_reg = {
+            .bufferOffset = 0,
+            /* Buffer stride isn't in bytes, it's in samples, the implementation
+             * uses the image's VkFormat to know how many bytes per sample
+             * the buffer has. So we have to convert by dividing. Stupid.
+             * Won't work with YUVA or other planar formats with alpha. */
+            .bufferRowLength = stride[i] / desc->comp[i].step,
+            .bufferImageHeight = p_h,
+            .imageSubresource = sub,
+            .imageOffset = { 0 },
+            .imageExtent = { p_w, p_h, 1, },
+        };
+        if (to_buf)
+            vkCmdCopyImageToBuffer(s->cmd_buf, frame->img, frame->layout,
+                                   buffer[i].buf, 1, &buf_reg);
+        else
+            vkCmdCopyBufferToImage(s->cmd_buf, buffer[i].buf, frame->img,
+                                   frame->layout, 1, &buf_reg);
+    }
+
+    ret = vkEndCommandBuffer(s->cmd_buf);
+    if (ret != VK_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Unable to finish command buffer: %s\n",
+               vk_ret2str(ret));
+        return AVERROR_EXTERNAL;
+    }
+
+    ret = vkQueueSubmit(s->cmd_queue, 1, &s_info, s->cmd_fence);
+    if (ret != VK_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Unable to submit command buffer: %s\n",
+               vk_ret2str(ret));
+        return AVERROR_EXTERNAL;
+    } else {
+        /* Wait for completion so the caller may reuse/unmap the buffers */
+        vkWaitForFences(hwctx->act_dev, 1, &s->cmd_fence, VK_TRUE, UINT64_MAX);
+        vkResetFences(hwctx->act_dev, 1, &s->cmd_fence);
+    }
+
+    return 0;
+}
+
+/**
+ * Uploads a software frame (src) into a Vulkan hwframe (dst).
+ * Three strategies, in order of preference:
+ *  1. dst image is linear + host-visible: map it and av_frame_copy().
+ *  3. host-memory import available: bind src's plane buffers directly.
+ *  2. otherwise: allocate staging buffers and do an extra CPU copy.
+ */
+static int vulkan_transfer_data_to(AVHWFramesContext *hwfc, AVFrame *dst,
+                                   const AVFrame *src)
+{
+    int err = 0;
+    AVFrame *map = NULL;
+    /* Sized for the worst case - formats can have up to
+     * AV_NUM_DATA_POINTERS planes (buf[3] would overflow for 4-plane fmts) */
+    ImageBuffer buf[AV_NUM_DATA_POINTERS] = { { 0 } };
+    AVVkFrame *f = (AVVkFrame *)dst->data[0];
+    AVHWDeviceContext *dev_ctx = hwfc->device_ctx;
+    VulkanDevicePriv *p = dev_ctx->internal->priv;
+    const int planes = av_pix_fmt_count_planes(src->format);
+    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(src->format);
+    const int map_host = p->extensions & EXT_EXTERNAL_HOST_MEMORY;
+
+    if (src->format != AV_PIX_FMT_NONE && !av_vkfmt_from_pixfmt(src->format)) {
+        av_log(hwfc, AV_LOG_ERROR, "Unsupported source pixel format!\n");
+        return AVERROR(EINVAL);
+    }
+
+    if (src->width > hwfc->width || src->height > hwfc->height)
+        return AVERROR(EINVAL);
+
+    /* Path one - image is host visible and linear */
+    if (f->tiling == VK_IMAGE_TILING_LINEAR &&
+        f->flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
+        map = av_frame_alloc();
+        if (!map)
+            return AVERROR(ENOMEM);
+        map->format = src->format;
+
+        err = vulkan_map_frame(hwfc, map, dst, AV_HWFRAME_MAP_WRITE);
+        if (err)
+            goto end;
+
+        err = av_frame_copy(map, src);
+        goto end;
+    }
+
+    /* Path three - we can import _host_ memory and bind it to a buffer.
+     * NOTE: the imported memory must be the *source* frame's planes - that
+     * is what the buffer -> image copy reads. (Previously this wrongly used
+     * dst, which is the hardware frame and has no host data/linesize.) */
+    for (int i = 0; i < planes; i++) {
+        int h = src->height;
+        int p_height = i > 0 ? AV_CEIL_RSHIFT(h, desc->log2_chroma_h) : h;
+        VkImportMemoryHostPointerInfoEXT import_desc = {
+            .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
+            .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
+            .pHostPointer = src->data[i],
+        };
+        VkMemoryAllocateInfo import_mem_info = {
+            .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
+            .pNext = &import_desc,
+            .allocationSize = p_height * src->linesize[i],
+        };
+        err = create_buf(dev_ctx, &buf[i], import_mem_info.allocationSize,
+                         VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
+                         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, NULL,
+                         map_host ? &import_mem_info : NULL);
+        if (err)
+            goto end;
+    }
+
+    /* Path two - we can't import host memory so we have to do 2 copies */
+    if (!map_host) {
+        uint8_t *mem[AV_NUM_DATA_POINTERS];
+        if ((err = map_buffers(dev_ctx, buf, mem, planes, 0)))
+            goto end;
+
+        for (int i = 0; i < planes; i++) {
+            int h = src->height;
+            int p_height = i > 0 ? AV_CEIL_RSHIFT(h, desc->log2_chroma_h) : h;
+            memcpy(mem[i], src->data[i], p_height*src->linesize[i]);
+        }
+
+        if ((err = unmap_buffers(dev_ctx, buf, planes, 1)))
+            goto end;
+    }
+
+    /* Copy buffer to image - propagate failures instead of ignoring them */
+    err = transfer_image_buf(dev_ctx, f, buf, src->linesize,
+                             src->width, src->height, src->format, 0);
+
+end:
+    av_frame_free(&map);
+    for (int i = 0; i < planes; i++)
+        free_buf(dev_ctx, &buf[i]);
+
+    return err;
+}
+
+/**
+ * Downloads a Vulkan hwframe (src) into a software frame (dst).
+ * Mirrors vulkan_transfer_data_to(): map-and-copy if the image is linear
+ * and host-visible, otherwise copy through a host-visible buffer (binding
+ * dst's planes directly when host-memory import is available).
+ */
+static int vulkan_transfer_data_from(AVHWFramesContext *hwfc, AVFrame *dst,
+                                     const AVFrame *src)
+{
+    int err = 0;
+    AVFrame *map = NULL;
+    /* Sized for the worst case number of planes, matching _data_to() */
+    ImageBuffer buf[AV_NUM_DATA_POINTERS] = { { 0 } };
+    AVVkFrame *f = (AVVkFrame *)src->data[0];
+    AVHWDeviceContext *dev_ctx = hwfc->device_ctx;
+    VulkanDevicePriv *p = dev_ctx->internal->priv;
+    const int planes = av_pix_fmt_count_planes(dst->format);
+    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(dst->format);
+    const int map_host = p->extensions & EXT_EXTERNAL_HOST_MEMORY;
+
+    /* Validate the destination format, same as the upload path does */
+    if (dst->format != AV_PIX_FMT_NONE && !av_vkfmt_from_pixfmt(dst->format)) {
+        av_log(hwfc, AV_LOG_ERROR, "Unsupported destination pixel format!\n");
+        return AVERROR(EINVAL);
+    }
+
+    if (dst->width > hwfc->width || dst->height > hwfc->height)
+        return AVERROR(EINVAL);
+
+    /* Path one - image is host visible and linear */
+    if (f->tiling == VK_IMAGE_TILING_LINEAR &&
+        f->flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
+        map = av_frame_alloc();
+        if (!map)
+            return AVERROR(ENOMEM);
+        map->format = dst->format;
+
+        err = vulkan_map_frame(hwfc, map, src, AV_HWFRAME_MAP_READ);
+        if (err)
+            goto end;
+
+        err = av_frame_copy(dst, map);
+        goto end;
+    }
+
+    /* Path two - copy through transfer-destination buffers; import the
+     * destination planes as host memory when the extension is available */
+    for (int i = 0; i < planes; i++) {
+        int h = dst->height;
+        int p_height = i > 0 ? AV_CEIL_RSHIFT(h, desc->log2_chroma_h) : h;
+        VkImportMemoryHostPointerInfoEXT import_desc = {
+            .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
+            .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
+            .pHostPointer = dst->data[i],
+        };
+        VkMemoryAllocateInfo import_mem_info = {
+            .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
+            .pNext = &import_desc,
+            .allocationSize = p_height * dst->linesize[i],
+        };
+        err = create_buf(dev_ctx, &buf[i], import_mem_info.allocationSize,
+                         VK_BUFFER_USAGE_TRANSFER_DST_BIT,
+                         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, NULL,
+                         map_host ? &import_mem_info : NULL);
+        if (err) /* was unchecked - an allocation failure went unnoticed */
+            goto end;
+    }
+
+    /* Copy image to buffer - check the result before touching the buffers */
+    err = transfer_image_buf(dev_ctx, f, buf, dst->linesize,
+                             dst->width, dst->height, dst->format, 1);
+    if (err)
+        goto end;
+
+    if (!map_host) {
+        uint8_t *mem[AV_NUM_DATA_POINTERS];
+        if ((err = map_buffers(dev_ctx, buf, mem, planes, 1)))
+            goto end;
+
+        for (int i = 0; i < planes; i++) {
+            int h = dst->height;
+            int p_height = i > 0 ? AV_CEIL_RSHIFT(h, desc->log2_chroma_h) : h;
+            memcpy(dst->data[i], mem[i], p_height * dst->linesize[i]);
+        }
+
+        if ((err = unmap_buffers(dev_ctx, buf, planes, 0)))
+            goto end;
+    }
+
+end:
+    av_frame_free(&map);
+    for (int i = 0; i < planes; i++)
+        free_buf(dev_ctx, &buf[i]);
+
+    return err;
+}
+
+/* HWContextType descriptor registered via hwcontext_internal.h.
+ * Function designators are used uniformly (no mixed '&fn'/'fn' style). */
+const HWContextType ff_hwcontext_type_vulkan = {
+    .type                   = AV_HWDEVICE_TYPE_VULKAN,
+    .name                   = "Vulkan",
+
+    .device_hwctx_size      = sizeof(AVVulkanDeviceContext),
+    .device_priv_size       = sizeof(VulkanDevicePriv),
+    .frames_hwctx_size      = sizeof(AVVulkanFramesContext),
+
+    .device_init            = vulkan_device_init,
+    .device_create          = vulkan_device_create,
+    .device_derive          = vulkan_device_derive,
+
+    .frames_get_constraints = vulkan_frames_get_constraints,
+    .frames_init            = vulkan_frames_init,
+    .frames_get_buffer      = vulkan_get_buffer,
+
+    .transfer_get_formats   = vulkan_transfer_get_formats,
+    .transfer_data_to       = vulkan_transfer_data_to,
+    .transfer_data_from     = vulkan_transfer_data_from,
+
+    .map_to                 = vulkan_map_to,
+    .map_from               = vulkan_map_from,
+
+    .pix_fmts = (const enum AVPixelFormat[]) {
+        AV_PIX_FMT_VULKAN,
+        AV_PIX_FMT_NONE
+    },
+};
new file mode 100644
@@ -0,0 +1,133 @@
+/*
+ * Vulkan hwcontext
+ * Copyright (c) 2018 Rostislav Pehlivanov <atomnuker@gmail.com>
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#ifndef AVUTIL_HWCONTEXT_VULKAN_H
+#define AVUTIL_HWCONTEXT_VULKAN_H
+
+#include <vulkan/vulkan.h>
+
+/**
+ * @file
+ * API-specific header for AV_HWDEVICE_TYPE_VULKAN.
+ *
+ * For user-allocated pools, AVHWFramesContext.pool must return AVBufferRefs
+ * with the data pointer set to an AVVkFrame.
+ */
+
+/**
+ * Main Vulkan context, allocated as AVHWDeviceContext.hwctx.
+ * All of these can be set before init to change what the context uses
+ */
+typedef struct AVVulkanDeviceContext {
+    /**
+     * Custom memory allocator, else NULL
+     */
+    const VkAllocationCallbacks *alloc;
+    /**
+     * Vulkan instance
+     */
+    VkInstance inst;
+    /**
+     * Physical device
+     */
+    VkPhysicalDevice phys_dev;
+    /**
+     * Active device (the logical VkDevice created from phys_dev)
+     */
+    VkDevice act_dev;
+    /**
+     * Queue family index for graphics
+     */
+    int queue_family_index;
+    /**
+     * Queue family index for transfer ops only. By default, the priority order
+     * is dedicated transfer > dedicated compute > graphics.
+     */
+    int queue_family_tx_index;
+    /**
+     * Queue family index for compute ops. Will be equal to the graphics
+     * one unless a dedicated transfer queue is found.
+     * NOTE(review): "transfer" here looks like a copy-paste of the field
+     * above - presumably a dedicated *compute* queue is meant; confirm
+     * against the device init code.
+     */
+    int queue_family_comp_index;
+} AVVulkanDeviceContext;
+
+/**
+ * Allocated as AVHWFramesContext.hwctx, used to set pool-specific options
+ */
+typedef struct AVVulkanFramesContext {
+    /**
+     * Controls the tiling of output frames
+     * (VkImageTiling, e.g. linear or optimal).
+     */
+    VkImageTiling tiling;
+    /**
+     * Defines extra usage of output frames. This is bitwise OR'd with the
+     * standard usage flags (SAMPLED, STORAGE, TRANSFER_SRC and TRANSFER_DST).
+     */
+    VkImageUsageFlagBits usage;
+    /**
+     * Set to 1 to allocate all planes separately (disjoint images),
+     * giving one memory allocation per plane.
+     */
+    int disjoint;
+    /**
+     * Extension data for image creation. By default, if the extension is
+     * available, this will be chained to a VkImageFormatListCreateInfoKHR.
+     */
+    void *create_pnext;
+    /**
+     * Extension data for memory allocation. If the image is disjoint, this
+     * must be one per plane, otherwise just the first entry is used.
+     * This will be chained to VkExportMemoryAllocateInfo, which is used
+     * to make all pool images exportable to other APIs.
+     */
+    void *alloc_pnext[AV_NUM_DATA_POINTERS];
+} AVVulkanFramesContext;
+
+/**
+ * Frame structure, the VkFormat of the image will always match
+ * the pool's sw_format.
+ */
+typedef struct AVVkFrame {
+    /**
+     * The backing Vulkan image
+     */
+    VkImage img;
+    /**
+     * Tiling the image was created with
+     */
+    VkImageTiling tiling;
+    /**
+     * Always 1 for non-disjoint images, #planes for disjoint
+     */
+    int mem_count;
+    /**
+     * Memory backing the image, mem_count entries are valid
+     */
+    VkDeviceMemory mem[AV_NUM_DATA_POINTERS];
+    /**
+     * OR'd flags for all memory allocated
+     */
+    VkMemoryPropertyFlagBits flags;
+
+    /**
+     * Updated after every barrier
+     */
+    VkAccessFlagBits access;
+    VkImageLayout layout;
+} AVVkFrame;
+
+/**
+ * Converts AVPixelFormat to VkFormat, returns VK_FORMAT_UNDEFINED if unsupported
+ * by the hwcontext
+ */
+VkFormat av_vkfmt_from_pixfmt(enum AVPixelFormat p);
+
+#endif /* AVUTIL_HWCONTEXT_VULKAN_H */
@@ -2206,6 +2206,10 @@ static const AVPixFmtDescriptor av_pix_fmt_descriptors[AV_PIX_FMT_NB] = {
.name = "opencl",
.flags = AV_PIX_FMT_FLAG_HWACCEL,
},
+ [AV_PIX_FMT_VULKAN] = {
+ .name = "vulkan",
+ .flags = AV_PIX_FMT_FLAG_HWACCEL,
+ },
};
#if FF_API_PLUS1_MINUS1
FF_ENABLE_DEPRECATION_WARNINGS
@@ -333,6 +333,10 @@ enum AVPixelFormat {
AV_PIX_FMT_GRAY14BE, ///< Y , 14bpp, big-endian
AV_PIX_FMT_GRAY14LE, ///< Y , 14bpp, little-endian
+    /* Vulkan hardware images,
+     * data[0] contains an AVVkFrame */
+    AV_PIX_FMT_VULKAN,
+
AV_PIX_FMT_NB ///< number of pixel formats, DO NOT USE THIS if you want to link with shared libav* because the number of formats might differ between versions
};
@@ -79,8 +79,8 @@
*/
#define LIBAVUTIL_VERSION_MAJOR 56
-#define LIBAVUTIL_VERSION_MINOR 18
-#define LIBAVUTIL_VERSION_MICRO 102
+#define LIBAVUTIL_VERSION_MINOR 19
+#define LIBAVUTIL_VERSION_MICRO 100
#define LIBAVUTIL_VERSION_INT AV_VERSION_INT(LIBAVUTIL_VERSION_MAJOR, \
LIBAVUTIL_VERSION_MINOR, \
This commit adds a Vulkan hwcontext, currently capable of mapping DRM and VAAPI frames but additional functionality can be added later to support importing of D3D11 surfaces as well as exporting to various other APIs. This context requires the newest stable version of the Vulkan API. It makes use of every part of the Vulkan spec in order to ensure fastest possible uploading, downloading and mapping of frames. To be useful for non-RGB images an implementation with the YUV images extension is needed. All current implementations support that with the exception of AMD, though support is coming soon for Mesa. Signed-off-by: Rostislav Pehlivanov <atomnuker@gmail.com> --- configure | 10 + doc/APIchanges | 4 + libavutil/Makefile | 3 + libavutil/hwcontext.c | 4 + libavutil/hwcontext.h | 1 + libavutil/hwcontext_internal.h | 1 + libavutil/hwcontext_vulkan.c | 2225 ++++++++++++++++++++++++++++++++ libavutil/hwcontext_vulkan.h | 133 ++ libavutil/pixdesc.c | 4 + libavutil/pixfmt.h | 4 + libavutil/version.h | 4 +- 11 files changed, 2391 insertions(+), 2 deletions(-) create mode 100644 libavutil/hwcontext_vulkan.c create mode 100644 libavutil/hwcontext_vulkan.h