From d1435009ed914cc50533a71b1e25132376c28586 Mon Sep 17 00:00:00 2001
From: ReinUsesLisp
Date: Thu, 24 Dec 2020 21:30:11 -0300
Subject: vulkan_common: Rename renderer_vulkan/wrapper.h to vulkan_common/vulkan_wrapper.h

Allows sharing Vulkan wrapper code between different rendering backends.
---
 src/video_core/vulkan_common/vulkan_wrapper.cpp |  928 +++++++++++++++++
 src/video_core/vulkan_common/vulkan_wrapper.h   | 1213 +++++++++++++++++++++++
 2 files changed, 2141 insertions(+)
 create mode 100644 src/video_core/vulkan_common/vulkan_wrapper.cpp
 create mode 100644 src/video_core/vulkan_common/vulkan_wrapper.h
(limited to 'src/video_core/vulkan_common')

diff --git a/src/video_core/vulkan_common/vulkan_wrapper.cpp b/src/video_core/vulkan_common/vulkan_wrapper.cpp
new file mode 100644
index 000000000..478402bbd
--- /dev/null
+++ b/src/video_core/vulkan_common/vulkan_wrapper.cpp
@@ -0,0 +1,928 @@
+// Copyright 2020 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#include <algorithm>
+#include <exception>
+#include <memory>
+#include <optional>
+#include <string_view>
+#include <utility>
+#include <vector>
+
+#include "common/common_types.h"
+#include "common/logging/log.h"
+
+#include "video_core/vulkan_common/vulkan_wrapper.h"
+
+namespace Vulkan::vk {
+
+namespace {
+
+template <typename Func>
+void SortPhysicalDevices(std::vector<VkPhysicalDevice>& devices, const InstanceDispatch& dld,
+                         Func&& func) {
+    // Calling GetProperties calls into Vulkan more often than strictly needed, but these
+    // functions are expected to be cheap.
+    std::stable_sort(devices.begin(), devices.end(),
+                     [&dld, &func](VkPhysicalDevice lhs, VkPhysicalDevice rhs) {
+                         return func(vk::PhysicalDevice(lhs, dld).GetProperties(),
+                                     vk::PhysicalDevice(rhs, dld).GetProperties());
+                     });
+}
+
+void SortPhysicalDevicesPerVendor(std::vector<VkPhysicalDevice>& devices,
+                                  const InstanceDispatch& dld,
+                                  std::initializer_list<u32> vendor_ids) {
+    for (auto it = vendor_ids.end(); it != vendor_ids.begin();) {
+        --it;
+        SortPhysicalDevices(devices, dld, [id = *it](const auto& lhs, const auto& rhs) {
+            return lhs.vendorID == id && rhs.vendorID != id;
+        });
+    }
+}
+
+void SortPhysicalDevices(std::vector<VkPhysicalDevice>& devices, const InstanceDispatch& dld) {
+    // Sort by name as a baseline so that GPUs with higher numbers appear first
+    // (e.g. a GTX 1650 is intentionally listed before a GTX 1080).
+    SortPhysicalDevices(devices, dld, [](const auto& lhs, const auto& rhs) {
+        return std::string_view{lhs.deviceName} > std::string_view{rhs.deviceName};
+    });
+    // Prefer discrete over non-discrete
+    SortPhysicalDevices(devices, dld, [](const auto& lhs, const auto& rhs) {
+        return lhs.deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU &&
+               rhs.deviceType != VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU;
+    });
+    // Prefer Nvidia over AMD, AMD over Intel, Intel over the rest.
+ SortPhysicalDevicesPerVendor(devices, dld, {0x10DE, 0x1002, 0x8086}); +} + +template +bool Proc(T& result, const InstanceDispatch& dld, const char* proc_name, + VkInstance instance = nullptr) noexcept { + result = reinterpret_cast(dld.vkGetInstanceProcAddr(instance, proc_name)); + return result != nullptr; +} + +template +void Proc(T& result, const DeviceDispatch& dld, const char* proc_name, VkDevice device) noexcept { + result = reinterpret_cast(dld.vkGetDeviceProcAddr(device, proc_name)); +} + +void Load(VkDevice device, DeviceDispatch& dld) noexcept { +#define X(name) Proc(dld.name, dld, #name, device) + X(vkAcquireNextImageKHR); + X(vkAllocateCommandBuffers); + X(vkAllocateDescriptorSets); + X(vkAllocateMemory); + X(vkBeginCommandBuffer); + X(vkBindBufferMemory); + X(vkBindImageMemory); + X(vkCmdBeginQuery); + X(vkCmdBeginRenderPass); + X(vkCmdBeginTransformFeedbackEXT); + X(vkCmdBeginDebugUtilsLabelEXT); + X(vkCmdBindDescriptorSets); + X(vkCmdBindIndexBuffer); + X(vkCmdBindPipeline); + X(vkCmdBindTransformFeedbackBuffersEXT); + X(vkCmdBindVertexBuffers); + X(vkCmdBlitImage); + X(vkCmdClearAttachments); + X(vkCmdCopyBuffer); + X(vkCmdCopyBufferToImage); + X(vkCmdCopyImage); + X(vkCmdCopyImageToBuffer); + X(vkCmdDispatch); + X(vkCmdDraw); + X(vkCmdDrawIndexed); + X(vkCmdEndQuery); + X(vkCmdEndRenderPass); + X(vkCmdEndTransformFeedbackEXT); + X(vkCmdEndDebugUtilsLabelEXT); + X(vkCmdFillBuffer); + X(vkCmdPipelineBarrier); + X(vkCmdPushConstants); + X(vkCmdSetBlendConstants); + X(vkCmdSetDepthBias); + X(vkCmdSetDepthBounds); + X(vkCmdSetEvent); + X(vkCmdSetScissor); + X(vkCmdSetStencilCompareMask); + X(vkCmdSetStencilReference); + X(vkCmdSetStencilWriteMask); + X(vkCmdSetViewport); + X(vkCmdWaitEvents); + X(vkCmdBindVertexBuffers2EXT); + X(vkCmdSetCullModeEXT); + X(vkCmdSetDepthBoundsTestEnableEXT); + X(vkCmdSetDepthCompareOpEXT); + X(vkCmdSetDepthTestEnableEXT); + X(vkCmdSetDepthWriteEnableEXT); + X(vkCmdSetFrontFaceEXT); + X(vkCmdSetPrimitiveTopologyEXT); + X(vkCmdSetStencilOpEXT); + X(vkCmdSetStencilTestEnableEXT); + X(vkCmdResolveImage); + X(vkCreateBuffer); + X(vkCreateBufferView); + X(vkCreateCommandPool); + X(vkCreateComputePipelines); + X(vkCreateDescriptorPool); + X(vkCreateDescriptorSetLayout); + X(vkCreateDescriptorUpdateTemplateKHR); + X(vkCreateEvent); + X(vkCreateFence); + X(vkCreateFramebuffer); + X(vkCreateGraphicsPipelines); + X(vkCreateImage); + X(vkCreateImageView); + X(vkCreatePipelineLayout); + X(vkCreateQueryPool); + X(vkCreateRenderPass); + X(vkCreateSampler); + X(vkCreateSemaphore); + X(vkCreateShaderModule); + X(vkCreateSwapchainKHR); + X(vkDestroyBuffer); + X(vkDestroyBufferView); + X(vkDestroyCommandPool); + X(vkDestroyDescriptorPool); + X(vkDestroyDescriptorSetLayout); + X(vkDestroyDescriptorUpdateTemplateKHR); + X(vkDestroyEvent); + X(vkDestroyFence); + X(vkDestroyFramebuffer); + X(vkDestroyImage); + X(vkDestroyImageView); + X(vkDestroyPipeline); + X(vkDestroyPipelineLayout); + X(vkDestroyQueryPool); + X(vkDestroyRenderPass); + X(vkDestroySampler); + X(vkDestroySemaphore); + X(vkDestroyShaderModule); + X(vkDestroySwapchainKHR); + X(vkDeviceWaitIdle); + X(vkEndCommandBuffer); + X(vkFreeCommandBuffers); + X(vkFreeDescriptorSets); + X(vkFreeMemory); + X(vkGetBufferMemoryRequirements); + X(vkGetDeviceQueue); + X(vkGetEventStatus); + X(vkGetFenceStatus); + X(vkGetImageMemoryRequirements); + X(vkGetQueryPoolResults); + X(vkGetSemaphoreCounterValueKHR); + X(vkMapMemory); + X(vkQueueSubmit); + X(vkResetFences); + X(vkResetQueryPoolEXT); + 
+    X(vkSetDebugUtilsObjectNameEXT);
+    X(vkSetDebugUtilsObjectTagEXT);
+    X(vkUnmapMemory);
+    X(vkUpdateDescriptorSetWithTemplateKHR);
+    X(vkUpdateDescriptorSets);
+    X(vkWaitForFences);
+    X(vkWaitSemaphoresKHR);
+#undef X
+}
+
+template <typename T>
+void SetObjectName(const DeviceDispatch* dld, VkDevice device, T handle, VkObjectType type,
+                   const char* name) {
+    const VkDebugUtilsObjectNameInfoEXT name_info{
+        .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT,
+        .pNext = nullptr,
+        .objectType = type,
+        .objectHandle = reinterpret_cast<u64>(handle),
+        .pObjectName = name,
+    };
+    Check(dld->vkSetDebugUtilsObjectNameEXT(device, &name_info));
+}
+
+} // Anonymous namespace
+
+bool Load(InstanceDispatch& dld) noexcept {
+#define X(name) Proc(dld.name, dld, #name)
+    return X(vkCreateInstance) && X(vkEnumerateInstanceExtensionProperties) &&
+           X(vkEnumerateInstanceLayerProperties);
+#undef X
+}
+
+bool Load(VkInstance instance, InstanceDispatch& dld) noexcept {
+#define X(name) Proc(dld.name, dld, #name, instance)
+    // These functions may fail to load depending on the enabled extensions.
+    // Don't return a failure on these.
+    X(vkCreateDebugUtilsMessengerEXT);
+    X(vkDestroyDebugUtilsMessengerEXT);
+    X(vkDestroySurfaceKHR);
+    X(vkGetPhysicalDeviceFeatures2KHR);
+    X(vkGetPhysicalDeviceProperties2KHR);
+    X(vkGetPhysicalDeviceSurfaceCapabilitiesKHR);
+    X(vkGetPhysicalDeviceSurfaceFormatsKHR);
+    X(vkGetPhysicalDeviceSurfacePresentModesKHR);
+    X(vkGetPhysicalDeviceSurfaceSupportKHR);
+    X(vkGetSwapchainImagesKHR);
+    X(vkQueuePresentKHR);
+
+    return X(vkCreateDevice) && X(vkDestroyDevice) &&
+           X(vkEnumerateDeviceExtensionProperties) && X(vkEnumeratePhysicalDevices) &&
+           X(vkGetDeviceProcAddr) && X(vkGetPhysicalDeviceFormatProperties) &&
+           X(vkGetPhysicalDeviceMemoryProperties) && X(vkGetPhysicalDeviceProperties) &&
+           X(vkGetPhysicalDeviceQueueFamilyProperties);
+#undef X
+}
+
+const char* Exception::what() const noexcept {
+    return ToString(result);
+}
+
+const char* ToString(VkResult result) noexcept {
+    switch (result) {
+    case VkResult::VK_SUCCESS:
+        return "VK_SUCCESS";
+    case VkResult::VK_NOT_READY:
+        return "VK_NOT_READY";
+    case VkResult::VK_TIMEOUT:
+        return "VK_TIMEOUT";
+    case VkResult::VK_EVENT_SET:
+        return "VK_EVENT_SET";
+    case VkResult::VK_EVENT_RESET:
+        return "VK_EVENT_RESET";
+    case VkResult::VK_INCOMPLETE:
+        return "VK_INCOMPLETE";
+    case VkResult::VK_ERROR_OUT_OF_HOST_MEMORY:
+        return "VK_ERROR_OUT_OF_HOST_MEMORY";
+    case VkResult::VK_ERROR_OUT_OF_DEVICE_MEMORY:
+        return "VK_ERROR_OUT_OF_DEVICE_MEMORY";
+    case VkResult::VK_ERROR_INITIALIZATION_FAILED:
+        return "VK_ERROR_INITIALIZATION_FAILED";
+    case VkResult::VK_ERROR_DEVICE_LOST:
+        return "VK_ERROR_DEVICE_LOST";
+    case VkResult::VK_ERROR_MEMORY_MAP_FAILED:
+        return "VK_ERROR_MEMORY_MAP_FAILED";
+    case VkResult::VK_ERROR_LAYER_NOT_PRESENT:
+        return "VK_ERROR_LAYER_NOT_PRESENT";
+    case VkResult::VK_ERROR_EXTENSION_NOT_PRESENT:
+        return "VK_ERROR_EXTENSION_NOT_PRESENT";
+    case VkResult::VK_ERROR_FEATURE_NOT_PRESENT:
+        return "VK_ERROR_FEATURE_NOT_PRESENT";
+    case VkResult::VK_ERROR_INCOMPATIBLE_DRIVER:
+        return "VK_ERROR_INCOMPATIBLE_DRIVER";
+    case VkResult::VK_ERROR_TOO_MANY_OBJECTS:
+        return "VK_ERROR_TOO_MANY_OBJECTS";
+    case VkResult::VK_ERROR_FORMAT_NOT_SUPPORTED:
+        return "VK_ERROR_FORMAT_NOT_SUPPORTED";
+    case VkResult::VK_ERROR_FRAGMENTED_POOL:
+        return "VK_ERROR_FRAGMENTED_POOL";
+    case VkResult::VK_ERROR_OUT_OF_POOL_MEMORY:
+        return "VK_ERROR_OUT_OF_POOL_MEMORY";
+    case
VkResult::VK_ERROR_INVALID_EXTERNAL_HANDLE: + return "VK_ERROR_INVALID_EXTERNAL_HANDLE"; + case VkResult::VK_ERROR_SURFACE_LOST_KHR: + return "VK_ERROR_SURFACE_LOST_KHR"; + case VkResult::VK_ERROR_NATIVE_WINDOW_IN_USE_KHR: + return "VK_ERROR_NATIVE_WINDOW_IN_USE_KHR"; + case VkResult::VK_SUBOPTIMAL_KHR: + return "VK_SUBOPTIMAL_KHR"; + case VkResult::VK_ERROR_OUT_OF_DATE_KHR: + return "VK_ERROR_OUT_OF_DATE_KHR"; + case VkResult::VK_ERROR_INCOMPATIBLE_DISPLAY_KHR: + return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR"; + case VkResult::VK_ERROR_VALIDATION_FAILED_EXT: + return "VK_ERROR_VALIDATION_FAILED_EXT"; + case VkResult::VK_ERROR_INVALID_SHADER_NV: + return "VK_ERROR_INVALID_SHADER_NV"; + case VkResult::VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT: + return "VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT"; + case VkResult::VK_ERROR_FRAGMENTATION_EXT: + return "VK_ERROR_FRAGMENTATION_EXT"; + case VkResult::VK_ERROR_NOT_PERMITTED_EXT: + return "VK_ERROR_NOT_PERMITTED_EXT"; + case VkResult::VK_ERROR_INVALID_DEVICE_ADDRESS_EXT: + return "VK_ERROR_INVALID_DEVICE_ADDRESS_EXT"; + case VkResult::VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT: + return "VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT"; + case VkResult::VK_ERROR_UNKNOWN: + return "VK_ERROR_UNKNOWN"; + case VkResult::VK_ERROR_INCOMPATIBLE_VERSION_KHR: + return "VK_ERROR_INCOMPATIBLE_VERSION_KHR"; + case VkResult::VK_THREAD_IDLE_KHR: + return "VK_THREAD_IDLE_KHR"; + case VkResult::VK_THREAD_DONE_KHR: + return "VK_THREAD_DONE_KHR"; + case VkResult::VK_OPERATION_DEFERRED_KHR: + return "VK_OPERATION_DEFERRED_KHR"; + case VkResult::VK_OPERATION_NOT_DEFERRED_KHR: + return "VK_OPERATION_NOT_DEFERRED_KHR"; + case VkResult::VK_PIPELINE_COMPILE_REQUIRED_EXT: + return "VK_PIPELINE_COMPILE_REQUIRED_EXT"; + case VkResult::VK_RESULT_MAX_ENUM: + return "VK_RESULT_MAX_ENUM"; + } + return "Unknown"; +} + +void Destroy(VkInstance instance, const InstanceDispatch& dld) noexcept { + dld.vkDestroyInstance(instance, nullptr); +} + +void Destroy(VkDevice device, const InstanceDispatch& dld) noexcept { + dld.vkDestroyDevice(device, nullptr); +} + +void Destroy(VkDevice device, VkBuffer handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyBuffer(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkBufferView handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyBufferView(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkCommandPool handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyCommandPool(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkDescriptorPool handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyDescriptorPool(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkDescriptorSetLayout handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyDescriptorSetLayout(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkDescriptorUpdateTemplateKHR handle, + const DeviceDispatch& dld) noexcept { + dld.vkDestroyDescriptorUpdateTemplateKHR(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkDeviceMemory handle, const DeviceDispatch& dld) noexcept { + dld.vkFreeMemory(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkEvent handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyEvent(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkFence handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyFence(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkFramebuffer handle, const DeviceDispatch& dld) 
noexcept { + dld.vkDestroyFramebuffer(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkImage handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyImage(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkImageView handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyImageView(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkPipeline handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyPipeline(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkPipelineLayout handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyPipelineLayout(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkQueryPool handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyQueryPool(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkRenderPass handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyRenderPass(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkSampler handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroySampler(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkSwapchainKHR handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroySwapchainKHR(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkSemaphore handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroySemaphore(device, handle, nullptr); +} + +void Destroy(VkDevice device, VkShaderModule handle, const DeviceDispatch& dld) noexcept { + dld.vkDestroyShaderModule(device, handle, nullptr); +} + +void Destroy(VkInstance instance, VkDebugUtilsMessengerEXT handle, + const InstanceDispatch& dld) noexcept { + dld.vkDestroyDebugUtilsMessengerEXT(instance, handle, nullptr); +} + +void Destroy(VkInstance instance, VkSurfaceKHR handle, const InstanceDispatch& dld) noexcept { + dld.vkDestroySurfaceKHR(instance, handle, nullptr); +} + +VkResult Free(VkDevice device, VkDescriptorPool handle, Span sets, + const DeviceDispatch& dld) noexcept { + return dld.vkFreeDescriptorSets(device, handle, sets.size(), sets.data()); +} + +VkResult Free(VkDevice device, VkCommandPool handle, Span buffers, + const DeviceDispatch& dld) noexcept { + dld.vkFreeCommandBuffers(device, handle, buffers.size(), buffers.data()); + return VK_SUCCESS; +} + +Instance Instance::Create(u32 version, Span layers, Span extensions, + InstanceDispatch& dispatch) noexcept { + const VkApplicationInfo application_info{ + .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO, + .pNext = nullptr, + .pApplicationName = "yuzu Emulator", + .applicationVersion = VK_MAKE_VERSION(0, 1, 0), + .pEngineName = "yuzu Emulator", + .engineVersion = VK_MAKE_VERSION(0, 1, 0), + .apiVersion = version, + }; + const VkInstanceCreateInfo ci{ + .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, + .pNext = nullptr, + .flags = 0, + .pApplicationInfo = &application_info, + .enabledLayerCount = layers.size(), + .ppEnabledLayerNames = layers.data(), + .enabledExtensionCount = extensions.size(), + .ppEnabledExtensionNames = extensions.data(), + }; + + VkInstance instance; + if (dispatch.vkCreateInstance(&ci, nullptr, &instance) != VK_SUCCESS) { + // Failed to create the instance. + return {}; + } + if (!Proc(dispatch.vkDestroyInstance, dispatch, "vkDestroyInstance", instance)) { + // We successfully created an instance but the destroy function couldn't be loaded. + // This is a good moment to panic. 
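+        // Bail out with an empty handle; without vkDestroyInstance the created VkInstance
+        // can never be released.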
+ return {}; + } + + return Instance(instance, dispatch); +} + +std::optional> Instance::EnumeratePhysicalDevices() { + u32 num; + if (dld->vkEnumeratePhysicalDevices(handle, &num, nullptr) != VK_SUCCESS) { + return std::nullopt; + } + std::vector physical_devices(num); + if (dld->vkEnumeratePhysicalDevices(handle, &num, physical_devices.data()) != VK_SUCCESS) { + return std::nullopt; + } + SortPhysicalDevices(physical_devices, *dld); + return std::make_optional(std::move(physical_devices)); +} + +DebugCallback Instance::TryCreateDebugCallback( + PFN_vkDebugUtilsMessengerCallbackEXT callback) noexcept { + const VkDebugUtilsMessengerCreateInfoEXT ci{ + .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT, + .pNext = nullptr, + .flags = 0, + .messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT | + VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | + VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT | + VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT, + .messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | + VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT, + .pfnUserCallback = callback, + .pUserData = nullptr, + }; + + VkDebugUtilsMessengerEXT messenger; + if (dld->vkCreateDebugUtilsMessengerEXT(handle, &ci, nullptr, &messenger) != VK_SUCCESS) { + return {}; + } + return DebugCallback(messenger, handle, *dld); +} + +void Buffer::BindMemory(VkDeviceMemory memory, VkDeviceSize offset) const { + Check(dld->vkBindBufferMemory(owner, handle, memory, offset)); +} + +void Buffer::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_BUFFER, name); +} + +void BufferView::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_BUFFER_VIEW, name); +} + +void Image::BindMemory(VkDeviceMemory memory, VkDeviceSize offset) const { + Check(dld->vkBindImageMemory(owner, handle, memory, offset)); +} + +void Image::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_IMAGE, name); +} + +void ImageView::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_IMAGE_VIEW, name); +} + +void DeviceMemory::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_DEVICE_MEMORY, name); +} + +void Fence::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_FENCE, name); +} + +void Framebuffer::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_FRAMEBUFFER, name); +} + +DescriptorSets DescriptorPool::Allocate(const VkDescriptorSetAllocateInfo& ai) const { + const std::size_t num = ai.descriptorSetCount; + std::unique_ptr sets = std::make_unique(num); + switch (const VkResult result = dld->vkAllocateDescriptorSets(owner, &ai, sets.get())) { + case VK_SUCCESS: + return DescriptorSets(std::move(sets), num, owner, handle, *dld); + case VK_ERROR_OUT_OF_POOL_MEMORY: + return {}; + default: + throw Exception(result); + } +} + +void DescriptorPool::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_DESCRIPTOR_POOL, name); +} + +CommandBuffers CommandPool::Allocate(std::size_t num_buffers, VkCommandBufferLevel level) const { + const VkCommandBufferAllocateInfo ai{ + .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, + .pNext = nullptr, + .commandPool = handle, + .level = level, + .commandBufferCount = static_cast(num_buffers), + }; + + std::unique_ptr buffers = 
std::make_unique(num_buffers); + switch (const VkResult result = dld->vkAllocateCommandBuffers(owner, &ai, buffers.get())) { + case VK_SUCCESS: + return CommandBuffers(std::move(buffers), num_buffers, owner, handle, *dld); + case VK_ERROR_OUT_OF_POOL_MEMORY: + return {}; + default: + throw Exception(result); + } +} + +void CommandPool::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_COMMAND_POOL, name); +} + +std::vector SwapchainKHR::GetImages() const { + u32 num; + Check(dld->vkGetSwapchainImagesKHR(owner, handle, &num, nullptr)); + std::vector images(num); + Check(dld->vkGetSwapchainImagesKHR(owner, handle, &num, images.data())); + return images; +} + +void Event::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_EVENT, name); +} + +void ShaderModule::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_SHADER_MODULE, name); +} + +void Semaphore::SetObjectNameEXT(const char* name) const { + SetObjectName(dld, owner, handle, VK_OBJECT_TYPE_SEMAPHORE, name); +} + +Device Device::Create(VkPhysicalDevice physical_device, Span queues_ci, + Span enabled_extensions, const void* next, + DeviceDispatch& dispatch) noexcept { + const VkDeviceCreateInfo ci{ + .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, + .pNext = next, + .flags = 0, + .queueCreateInfoCount = queues_ci.size(), + .pQueueCreateInfos = queues_ci.data(), + .enabledLayerCount = 0, + .ppEnabledLayerNames = nullptr, + .enabledExtensionCount = enabled_extensions.size(), + .ppEnabledExtensionNames = enabled_extensions.data(), + .pEnabledFeatures = nullptr, + }; + + VkDevice device; + if (dispatch.vkCreateDevice(physical_device, &ci, nullptr, &device) != VK_SUCCESS) { + return {}; + } + Load(device, dispatch); + return Device(device, dispatch); +} + +Queue Device::GetQueue(u32 family_index) const noexcept { + VkQueue queue; + dld->vkGetDeviceQueue(handle, family_index, 0, &queue); + return Queue(queue, *dld); +} + +Buffer Device::CreateBuffer(const VkBufferCreateInfo& ci) const { + VkBuffer object; + Check(dld->vkCreateBuffer(handle, &ci, nullptr, &object)); + return Buffer(object, handle, *dld); +} + +BufferView Device::CreateBufferView(const VkBufferViewCreateInfo& ci) const { + VkBufferView object; + Check(dld->vkCreateBufferView(handle, &ci, nullptr, &object)); + return BufferView(object, handle, *dld); +} + +Image Device::CreateImage(const VkImageCreateInfo& ci) const { + VkImage object; + Check(dld->vkCreateImage(handle, &ci, nullptr, &object)); + return Image(object, handle, *dld); +} + +ImageView Device::CreateImageView(const VkImageViewCreateInfo& ci) const { + VkImageView object; + Check(dld->vkCreateImageView(handle, &ci, nullptr, &object)); + return ImageView(object, handle, *dld); +} + +Semaphore Device::CreateSemaphore() const { + static constexpr VkSemaphoreCreateInfo ci{ + .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, + .pNext = nullptr, + .flags = 0, + }; + return CreateSemaphore(ci); +} + +Semaphore Device::CreateSemaphore(const VkSemaphoreCreateInfo& ci) const { + VkSemaphore object; + Check(dld->vkCreateSemaphore(handle, &ci, nullptr, &object)); + return Semaphore(object, handle, *dld); +} + +Fence Device::CreateFence(const VkFenceCreateInfo& ci) const { + VkFence object; + Check(dld->vkCreateFence(handle, &ci, nullptr, &object)); + return Fence(object, handle, *dld); +} + +DescriptorPool Device::CreateDescriptorPool(const VkDescriptorPoolCreateInfo& ci) const { + VkDescriptorPool 
object; + Check(dld->vkCreateDescriptorPool(handle, &ci, nullptr, &object)); + return DescriptorPool(object, handle, *dld); +} + +RenderPass Device::CreateRenderPass(const VkRenderPassCreateInfo& ci) const { + VkRenderPass object; + Check(dld->vkCreateRenderPass(handle, &ci, nullptr, &object)); + return RenderPass(object, handle, *dld); +} + +DescriptorSetLayout Device::CreateDescriptorSetLayout( + const VkDescriptorSetLayoutCreateInfo& ci) const { + VkDescriptorSetLayout object; + Check(dld->vkCreateDescriptorSetLayout(handle, &ci, nullptr, &object)); + return DescriptorSetLayout(object, handle, *dld); +} + +PipelineLayout Device::CreatePipelineLayout(const VkPipelineLayoutCreateInfo& ci) const { + VkPipelineLayout object; + Check(dld->vkCreatePipelineLayout(handle, &ci, nullptr, &object)); + return PipelineLayout(object, handle, *dld); +} + +Pipeline Device::CreateGraphicsPipeline(const VkGraphicsPipelineCreateInfo& ci) const { + VkPipeline object; + Check(dld->vkCreateGraphicsPipelines(handle, nullptr, 1, &ci, nullptr, &object)); + return Pipeline(object, handle, *dld); +} + +Pipeline Device::CreateComputePipeline(const VkComputePipelineCreateInfo& ci) const { + VkPipeline object; + Check(dld->vkCreateComputePipelines(handle, nullptr, 1, &ci, nullptr, &object)); + return Pipeline(object, handle, *dld); +} + +Sampler Device::CreateSampler(const VkSamplerCreateInfo& ci) const { + VkSampler object; + Check(dld->vkCreateSampler(handle, &ci, nullptr, &object)); + return Sampler(object, handle, *dld); +} + +Framebuffer Device::CreateFramebuffer(const VkFramebufferCreateInfo& ci) const { + VkFramebuffer object; + Check(dld->vkCreateFramebuffer(handle, &ci, nullptr, &object)); + return Framebuffer(object, handle, *dld); +} + +CommandPool Device::CreateCommandPool(const VkCommandPoolCreateInfo& ci) const { + VkCommandPool object; + Check(dld->vkCreateCommandPool(handle, &ci, nullptr, &object)); + return CommandPool(object, handle, *dld); +} + +DescriptorUpdateTemplateKHR Device::CreateDescriptorUpdateTemplateKHR( + const VkDescriptorUpdateTemplateCreateInfoKHR& ci) const { + VkDescriptorUpdateTemplateKHR object; + Check(dld->vkCreateDescriptorUpdateTemplateKHR(handle, &ci, nullptr, &object)); + return DescriptorUpdateTemplateKHR(object, handle, *dld); +} + +QueryPool Device::CreateQueryPool(const VkQueryPoolCreateInfo& ci) const { + VkQueryPool object; + Check(dld->vkCreateQueryPool(handle, &ci, nullptr, &object)); + return QueryPool(object, handle, *dld); +} + +ShaderModule Device::CreateShaderModule(const VkShaderModuleCreateInfo& ci) const { + VkShaderModule object; + Check(dld->vkCreateShaderModule(handle, &ci, nullptr, &object)); + return ShaderModule(object, handle, *dld); +} + +Event Device::CreateEvent() const { + static constexpr VkEventCreateInfo ci{ + .sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, + .pNext = nullptr, + .flags = 0, + }; + + VkEvent object; + Check(dld->vkCreateEvent(handle, &ci, nullptr, &object)); + return Event(object, handle, *dld); +} + +SwapchainKHR Device::CreateSwapchainKHR(const VkSwapchainCreateInfoKHR& ci) const { + VkSwapchainKHR object; + Check(dld->vkCreateSwapchainKHR(handle, &ci, nullptr, &object)); + return SwapchainKHR(object, handle, *dld); +} + +DeviceMemory Device::TryAllocateMemory(const VkMemoryAllocateInfo& ai) const noexcept { + VkDeviceMemory memory; + if (dld->vkAllocateMemory(handle, &ai, nullptr, &memory) != VK_SUCCESS) { + return {}; + } + return DeviceMemory(memory, handle, *dld); +} + +DeviceMemory Device::AllocateMemory(const 
VkMemoryAllocateInfo& ai) const { + VkDeviceMemory memory; + Check(dld->vkAllocateMemory(handle, &ai, nullptr, &memory)); + return DeviceMemory(memory, handle, *dld); +} + +VkMemoryRequirements Device::GetBufferMemoryRequirements(VkBuffer buffer) const noexcept { + VkMemoryRequirements requirements; + dld->vkGetBufferMemoryRequirements(handle, buffer, &requirements); + return requirements; +} + +VkMemoryRequirements Device::GetImageMemoryRequirements(VkImage image) const noexcept { + VkMemoryRequirements requirements; + dld->vkGetImageMemoryRequirements(handle, image, &requirements); + return requirements; +} + +void Device::UpdateDescriptorSets(Span writes, + Span copies) const noexcept { + dld->vkUpdateDescriptorSets(handle, writes.size(), writes.data(), copies.size(), copies.data()); +} + +VkPhysicalDeviceProperties PhysicalDevice::GetProperties() const noexcept { + VkPhysicalDeviceProperties properties; + dld->vkGetPhysicalDeviceProperties(physical_device, &properties); + return properties; +} + +void PhysicalDevice::GetProperties2KHR(VkPhysicalDeviceProperties2KHR& properties) const noexcept { + dld->vkGetPhysicalDeviceProperties2KHR(physical_device, &properties); +} + +VkPhysicalDeviceFeatures PhysicalDevice::GetFeatures() const noexcept { + VkPhysicalDeviceFeatures2KHR features2; + features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR; + features2.pNext = nullptr; + dld->vkGetPhysicalDeviceFeatures2KHR(physical_device, &features2); + return features2.features; +} + +void PhysicalDevice::GetFeatures2KHR(VkPhysicalDeviceFeatures2KHR& features) const noexcept { + dld->vkGetPhysicalDeviceFeatures2KHR(physical_device, &features); +} + +VkFormatProperties PhysicalDevice::GetFormatProperties(VkFormat format) const noexcept { + VkFormatProperties properties; + dld->vkGetPhysicalDeviceFormatProperties(physical_device, format, &properties); + return properties; +} + +std::vector PhysicalDevice::EnumerateDeviceExtensionProperties() const { + u32 num; + dld->vkEnumerateDeviceExtensionProperties(physical_device, nullptr, &num, nullptr); + std::vector properties(num); + dld->vkEnumerateDeviceExtensionProperties(physical_device, nullptr, &num, properties.data()); + return properties; +} + +std::vector PhysicalDevice::GetQueueFamilyProperties() const { + u32 num; + dld->vkGetPhysicalDeviceQueueFamilyProperties(physical_device, &num, nullptr); + std::vector properties(num); + dld->vkGetPhysicalDeviceQueueFamilyProperties(physical_device, &num, properties.data()); + return properties; +} + +bool PhysicalDevice::GetSurfaceSupportKHR(u32 queue_family_index, VkSurfaceKHR surface) const { + VkBool32 supported; + Check(dld->vkGetPhysicalDeviceSurfaceSupportKHR(physical_device, queue_family_index, surface, + &supported)); + return supported == VK_TRUE; +} + +VkSurfaceCapabilitiesKHR PhysicalDevice::GetSurfaceCapabilitiesKHR(VkSurfaceKHR surface) const { + VkSurfaceCapabilitiesKHR capabilities; + Check(dld->vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physical_device, surface, &capabilities)); + return capabilities; +} + +std::vector PhysicalDevice::GetSurfaceFormatsKHR(VkSurfaceKHR surface) const { + u32 num; + Check(dld->vkGetPhysicalDeviceSurfaceFormatsKHR(physical_device, surface, &num, nullptr)); + std::vector formats(num); + Check( + dld->vkGetPhysicalDeviceSurfaceFormatsKHR(physical_device, surface, &num, formats.data())); + return formats; +} + +std::vector PhysicalDevice::GetSurfacePresentModesKHR( + VkSurfaceKHR surface) const { + u32 num; + 
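+    // Usual Vulkan two-call enumeration: query the count first, then fill the vector.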
Check(dld->vkGetPhysicalDeviceSurfacePresentModesKHR(physical_device, surface, &num, nullptr)); + std::vector modes(num); + Check(dld->vkGetPhysicalDeviceSurfacePresentModesKHR(physical_device, surface, &num, + modes.data())); + return modes; +} + +VkPhysicalDeviceMemoryProperties PhysicalDevice::GetMemoryProperties() const noexcept { + VkPhysicalDeviceMemoryProperties properties; + dld->vkGetPhysicalDeviceMemoryProperties(physical_device, &properties); + return properties; +} + +u32 AvailableVersion(const InstanceDispatch& dld) noexcept { + PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion; + if (!Proc(vkEnumerateInstanceVersion, dld, "vkEnumerateInstanceVersion")) { + // If the procedure is not found, Vulkan 1.0 is assumed + return VK_API_VERSION_1_0; + } + u32 version; + if (const VkResult result = vkEnumerateInstanceVersion(&version); result != VK_SUCCESS) { + LOG_ERROR(Render_Vulkan, "vkEnumerateInstanceVersion returned {}, assuming Vulkan 1.1", + ToString(result)); + return VK_API_VERSION_1_1; + } + return version; +} + +std::optional> EnumerateInstanceExtensionProperties( + const InstanceDispatch& dld) { + u32 num; + if (dld.vkEnumerateInstanceExtensionProperties(nullptr, &num, nullptr) != VK_SUCCESS) { + return std::nullopt; + } + std::vector properties(num); + if (dld.vkEnumerateInstanceExtensionProperties(nullptr, &num, properties.data()) != + VK_SUCCESS) { + return std::nullopt; + } + return properties; +} + +std::optional> EnumerateInstanceLayerProperties( + const InstanceDispatch& dld) { + u32 num; + if (dld.vkEnumerateInstanceLayerProperties(&num, nullptr) != VK_SUCCESS) { + return std::nullopt; + } + std::vector properties(num); + if (dld.vkEnumerateInstanceLayerProperties(&num, properties.data()) != VK_SUCCESS) { + return std::nullopt; + } + return properties; +} + +} // namespace Vulkan::vk diff --git a/src/video_core/vulkan_common/vulkan_wrapper.h b/src/video_core/vulkan_common/vulkan_wrapper.h new file mode 100644 index 000000000..f9a184e00 --- /dev/null +++ b/src/video_core/vulkan_common/vulkan_wrapper.h @@ -0,0 +1,1213 @@ +// Copyright 2020 yuzu Emulator Project +// Licensed under GPLv2 or any later version +// Refer to the license.txt file included. + +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#define VK_NO_PROTOTYPES +#include + +#include "common/common_types.h" + +#ifdef _MSC_VER +#pragma warning(disable : 26812) // Disable prefer enum class over enum +#endif + +namespace Vulkan::vk { + +/** + * Span for Vulkan arrays. + * Based on std::span but optimized for array access instead of iterators. + * Size returns uint32_t instead of size_t to ease interaction with Vulkan functions. + */ +template +class Span { +public: + using value_type = T; + using size_type = u32; + using difference_type = std::ptrdiff_t; + using reference = const T&; + using const_reference = const T&; + using pointer = const T*; + using const_pointer = const T*; + using iterator = const T*; + using const_iterator = const T*; + + /// Construct an empty span. + constexpr Span() noexcept = default; + + /// Construct an empty span + constexpr Span(std::nullptr_t) noexcept {} + + /// Construct a span from a single element. + constexpr Span(const T& value) noexcept : ptr{&value}, num{1} {} + + /// Construct a span from a range. 
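+    /// Accepts any contiguous range that provides std::data() and std::size().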
+ template + // requires std::data(const Range&) + // requires std::size(const Range&) + constexpr Span(const Range& range) : ptr{std::data(range)}, num{std::size(range)} {} + + /// Construct a span from a pointer and a size. + /// This is inteded for subranges. + constexpr Span(const T* ptr_, std::size_t num_) noexcept : ptr{ptr_}, num{num_} {} + + /// Returns the data pointer by the span. + constexpr const T* data() const noexcept { + return ptr; + } + + /// Returns the number of elements in the span. + /// @note Returns a 32 bits integer because most Vulkan functions expect this type. + constexpr u32 size() const noexcept { + return static_cast(num); + } + + /// Returns true when the span is empty. + constexpr bool empty() const noexcept { + return num == 0; + } + + /// Returns a reference to the element in the passed index. + /// @pre: index < size() + constexpr const T& operator[](std::size_t index) const noexcept { + return ptr[index]; + } + + /// Returns an iterator to the beginning of the span. + constexpr const T* begin() const noexcept { + return ptr; + } + + /// Returns an iterator to the end of the span. + constexpr const T* end() const noexcept { + return ptr + num; + } + + /// Returns an iterator to the beginning of the span. + constexpr const T* cbegin() const noexcept { + return ptr; + } + + /// Returns an iterator to the end of the span. + constexpr const T* cend() const noexcept { + return ptr + num; + } + +private: + const T* ptr = nullptr; + std::size_t num = 0; +}; + +/// Vulkan exception generated from a VkResult. +class Exception final : public std::exception { +public: + /// Construct the exception with a result. + /// @pre result != VK_SUCCESS + explicit Exception(VkResult result_) : result{result_} {} + virtual ~Exception() = default; + + const char* what() const noexcept override; + +private: + VkResult result; +}; + +/// Converts a VkResult enum into a rodata string +const char* ToString(VkResult) noexcept; + +/// Throws a Vulkan exception if result is not success. +inline void Check(VkResult result) { + if (result != VK_SUCCESS) { + throw Exception(result); + } +} + +/// Throws a Vulkan exception if result is an error. +/// @return result +inline VkResult Filter(VkResult result) { + if (result < 0) { + throw Exception(result); + } + return result; +} + +/// Table holding Vulkan instance function pointers. 
+struct InstanceDispatch { + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr; + + PFN_vkCreateInstance vkCreateInstance; + PFN_vkDestroyInstance vkDestroyInstance; + PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties; + PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties; + + PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT; + PFN_vkCreateDevice vkCreateDevice; + PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT; + PFN_vkDestroyDevice vkDestroyDevice; + PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR; + PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties; + PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices; + PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr; + PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR; + PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties; + PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties; + PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties; + PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR; + PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties; + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR; + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR; + PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR; + PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR; + PFN_vkQueuePresentKHR vkQueuePresentKHR; +}; + +/// Table holding Vulkan device function pointers. +struct DeviceDispatch : public InstanceDispatch { + PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR; + PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers; + PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets; + PFN_vkAllocateMemory vkAllocateMemory; + PFN_vkBeginCommandBuffer vkBeginCommandBuffer; + PFN_vkBindBufferMemory vkBindBufferMemory; + PFN_vkBindImageMemory vkBindImageMemory; + PFN_vkCmdBeginQuery vkCmdBeginQuery; + PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass; + PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT; + PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT; + PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets; + PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer; + PFN_vkCmdBindPipeline vkCmdBindPipeline; + PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT; + PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers; + PFN_vkCmdBlitImage vkCmdBlitImage; + PFN_vkCmdClearAttachments vkCmdClearAttachments; + PFN_vkCmdCopyBuffer vkCmdCopyBuffer; + PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage; + PFN_vkCmdCopyImage vkCmdCopyImage; + PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer; + PFN_vkCmdDispatch vkCmdDispatch; + PFN_vkCmdDraw vkCmdDraw; + PFN_vkCmdDrawIndexed vkCmdDrawIndexed; + PFN_vkCmdEndQuery vkCmdEndQuery; + PFN_vkCmdEndRenderPass vkCmdEndRenderPass; + PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT; + PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT; + PFN_vkCmdFillBuffer vkCmdFillBuffer; + PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier; + PFN_vkCmdPushConstants vkCmdPushConstants; + PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants; + PFN_vkCmdSetDepthBias vkCmdSetDepthBias; + PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds; + PFN_vkCmdSetEvent vkCmdSetEvent; + PFN_vkCmdSetScissor 
vkCmdSetScissor; + PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask; + PFN_vkCmdSetStencilReference vkCmdSetStencilReference; + PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask; + PFN_vkCmdSetViewport vkCmdSetViewport; + PFN_vkCmdWaitEvents vkCmdWaitEvents; + PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT; + PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT; + PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT; + PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT; + PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT; + PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT; + PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT; + PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT; + PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT; + PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT; + PFN_vkCmdResolveImage vkCmdResolveImage; + PFN_vkCreateBuffer vkCreateBuffer; + PFN_vkCreateBufferView vkCreateBufferView; + PFN_vkCreateCommandPool vkCreateCommandPool; + PFN_vkCreateComputePipelines vkCreateComputePipelines; + PFN_vkCreateDescriptorPool vkCreateDescriptorPool; + PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout; + PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR; + PFN_vkCreateEvent vkCreateEvent; + PFN_vkCreateFence vkCreateFence; + PFN_vkCreateFramebuffer vkCreateFramebuffer; + PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines; + PFN_vkCreateImage vkCreateImage; + PFN_vkCreateImageView vkCreateImageView; + PFN_vkCreatePipelineLayout vkCreatePipelineLayout; + PFN_vkCreateQueryPool vkCreateQueryPool; + PFN_vkCreateRenderPass vkCreateRenderPass; + PFN_vkCreateSampler vkCreateSampler; + PFN_vkCreateSemaphore vkCreateSemaphore; + PFN_vkCreateShaderModule vkCreateShaderModule; + PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR; + PFN_vkDestroyBuffer vkDestroyBuffer; + PFN_vkDestroyBufferView vkDestroyBufferView; + PFN_vkDestroyCommandPool vkDestroyCommandPool; + PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool; + PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout; + PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR; + PFN_vkDestroyEvent vkDestroyEvent; + PFN_vkDestroyFence vkDestroyFence; + PFN_vkDestroyFramebuffer vkDestroyFramebuffer; + PFN_vkDestroyImage vkDestroyImage; + PFN_vkDestroyImageView vkDestroyImageView; + PFN_vkDestroyPipeline vkDestroyPipeline; + PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout; + PFN_vkDestroyQueryPool vkDestroyQueryPool; + PFN_vkDestroyRenderPass vkDestroyRenderPass; + PFN_vkDestroySampler vkDestroySampler; + PFN_vkDestroySemaphore vkDestroySemaphore; + PFN_vkDestroyShaderModule vkDestroyShaderModule; + PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR; + PFN_vkDeviceWaitIdle vkDeviceWaitIdle; + PFN_vkEndCommandBuffer vkEndCommandBuffer; + PFN_vkFreeCommandBuffers vkFreeCommandBuffers; + PFN_vkFreeDescriptorSets vkFreeDescriptorSets; + PFN_vkFreeMemory vkFreeMemory; + PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements; + PFN_vkGetDeviceQueue vkGetDeviceQueue; + PFN_vkGetEventStatus vkGetEventStatus; + PFN_vkGetFenceStatus vkGetFenceStatus; + PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements; + PFN_vkGetQueryPoolResults vkGetQueryPoolResults; + PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR; + PFN_vkMapMemory vkMapMemory; + PFN_vkQueueSubmit vkQueueSubmit; + PFN_vkResetFences vkResetFences; + PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT; + PFN_vkSetDebugUtilsObjectNameEXT 
vkSetDebugUtilsObjectNameEXT; + PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT; + PFN_vkUnmapMemory vkUnmapMemory; + PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR; + PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets; + PFN_vkWaitForFences vkWaitForFences; + PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR; +}; + +/// Loads instance agnostic function pointers. +/// @return True on success, false on error. +bool Load(InstanceDispatch&) noexcept; + +/// Loads instance function pointers. +/// @return True on success, false on error. +bool Load(VkInstance, InstanceDispatch&) noexcept; + +void Destroy(VkInstance, const InstanceDispatch&) noexcept; +void Destroy(VkDevice, const InstanceDispatch&) noexcept; + +void Destroy(VkDevice, VkBuffer, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkBufferView, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkCommandPool, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkDescriptorPool, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkDescriptorSetLayout, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkDescriptorUpdateTemplateKHR, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkDeviceMemory, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkEvent, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkFence, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkFramebuffer, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkImage, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkImageView, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkPipeline, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkPipelineLayout, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkQueryPool, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkRenderPass, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkSampler, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkSwapchainKHR, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkSemaphore, const DeviceDispatch&) noexcept; +void Destroy(VkDevice, VkShaderModule, const DeviceDispatch&) noexcept; +void Destroy(VkInstance, VkDebugUtilsMessengerEXT, const InstanceDispatch&) noexcept; +void Destroy(VkInstance, VkSurfaceKHR, const InstanceDispatch&) noexcept; + +VkResult Free(VkDevice, VkDescriptorPool, Span, const DeviceDispatch&) noexcept; +VkResult Free(VkDevice, VkCommandPool, Span, const DeviceDispatch&) noexcept; + +template +class Handle; + +/// Handle with an owning type. +/// Analogue to std::unique_ptr. +template +class Handle { +public: + /// Construct a handle and hold it's ownership. + explicit Handle(Type handle_, OwnerType owner_, const Dispatch& dld_) noexcept + : handle{handle_}, owner{owner_}, dld{&dld_} {} + + /// Construct an empty handle. + Handle() = default; + + /// Copying Vulkan objects is not supported and will never be. + Handle(const Handle&) = delete; + Handle& operator=(const Handle&) = delete; + + /// Construct a handle transfering the ownership from another handle. + Handle(Handle&& rhs) noexcept + : handle{std::exchange(rhs.handle, nullptr)}, owner{rhs.owner}, dld{rhs.dld} {} + + /// Assign the current handle transfering the ownership from another handle. + /// Destroys any previously held object. + Handle& operator=(Handle&& rhs) noexcept { + Release(); + handle = std::exchange(rhs.handle, nullptr); + owner = rhs.owner; + dld = rhs.dld; + return *this; + } + + /// Destroys the current handle if it existed. 
+ ~Handle() noexcept { + Release(); + } + + /// Destroys any held object. + void reset() noexcept { + Release(); + handle = nullptr; + } + + /// Returns the address of the held object. + /// Intended for Vulkan structures that expect a pointer to an array. + const Type* address() const noexcept { + return std::addressof(handle); + } + + /// Returns the held Vulkan handle. + Type operator*() const noexcept { + return handle; + } + + /// Returns true when there's a held object. + explicit operator bool() const noexcept { + return handle != nullptr; + } + +protected: + Type handle = nullptr; + OwnerType owner = nullptr; + const Dispatch* dld = nullptr; + +private: + /// Destroys the held object if it exists. + void Release() noexcept { + if (handle) { + Destroy(owner, handle, *dld); + } + } +}; + +/// Dummy type used to specify a handle has no owner. +struct NoOwner {}; + +/// Handle without an owning type. +/// Analogue to std::unique_ptr +template +class Handle { +public: + /// Construct a handle and hold it's ownership. + explicit Handle(Type handle_, const Dispatch& dld_) noexcept : handle{handle_}, dld{&dld_} {} + + /// Construct an empty handle. + Handle() noexcept = default; + + /// Copying Vulkan objects is not supported and will never be. + Handle(const Handle&) = delete; + Handle& operator=(const Handle&) = delete; + + /// Construct a handle transfering ownership from another handle. + Handle(Handle&& rhs) noexcept : handle{std::exchange(rhs.handle, nullptr)}, dld{rhs.dld} {} + + /// Assign the current handle transfering the ownership from another handle. + /// Destroys any previously held object. + Handle& operator=(Handle&& rhs) noexcept { + Release(); + handle = std::exchange(rhs.handle, nullptr); + dld = rhs.dld; + return *this; + } + + /// Destroys the current handle if it existed. + ~Handle() noexcept { + Release(); + } + + /// Destroys any held object. + void reset() noexcept { + Release(); + handle = nullptr; + } + + /// Returns the address of the held object. + /// Intended for Vulkan structures that expect a pointer to an array. + const Type* address() const noexcept { + return std::addressof(handle); + } + + /// Returns the held Vulkan handle. + Type operator*() const noexcept { + return handle; + } + + /// Returns true when there's a held object. + operator bool() const noexcept { + return handle != nullptr; + } + +protected: + Type handle = nullptr; + const Dispatch* dld = nullptr; + +private: + /// Destroys the held object if it exists. + void Release() noexcept { + if (handle) { + Destroy(handle, *dld); + } + } +}; + +/// Array of a pool allocation. +/// Analogue to std::vector +template +class PoolAllocations { +public: + /// Construct an empty allocation. + PoolAllocations() = default; + + /// Construct an allocation. Errors are reported through IsOutOfPoolMemory(). + explicit PoolAllocations(std::unique_ptr allocations_, std::size_t num_, + VkDevice device_, PoolType pool_, const DeviceDispatch& dld_) noexcept + : allocations{std::move(allocations_)}, num{num_}, device{device_}, pool{pool_}, + dld{&dld_} {} + + /// Copying Vulkan allocations is not supported and will never be. + PoolAllocations(const PoolAllocations&) = delete; + PoolAllocations& operator=(const PoolAllocations&) = delete; + + /// Construct an allocation transfering ownership from another allocation. 
+ PoolAllocations(PoolAllocations&& rhs) noexcept + : allocations{std::move(rhs.allocations)}, num{rhs.num}, device{rhs.device}, pool{rhs.pool}, + dld{rhs.dld} {} + + /// Assign an allocation transfering ownership from another allocation. + /// Releases any previously held allocation. + PoolAllocations& operator=(PoolAllocations&& rhs) noexcept { + Release(); + allocations = std::move(rhs.allocations); + num = rhs.num; + device = rhs.device; + pool = rhs.pool; + dld = rhs.dld; + return *this; + } + + /// Destroys any held allocation. + ~PoolAllocations() { + Release(); + } + + /// Returns the number of allocations. + std::size_t size() const noexcept { + return num; + } + + /// Returns a pointer to the array of allocations. + AllocationType const* data() const noexcept { + return allocations.get(); + } + + /// Returns the allocation in the specified index. + /// @pre index < size() + AllocationType operator[](std::size_t index) const noexcept { + return allocations[index]; + } + + /// True when a pool fails to construct. + bool IsOutOfPoolMemory() const noexcept { + return !device; + } + +private: + /// Destroys the held allocations if they exist. + void Release() noexcept { + if (!allocations) { + return; + } + const Span span(allocations.get(), num); + const VkResult result = Free(device, pool, span, *dld); + // There's no way to report errors from a destructor. + if (result != VK_SUCCESS) { + std::terminate(); + } + } + + std::unique_ptr allocations; + std::size_t num = 0; + VkDevice device = nullptr; + PoolType pool = nullptr; + const DeviceDispatch* dld = nullptr; +}; + +using DebugCallback = Handle; +using DescriptorSetLayout = Handle; +using DescriptorUpdateTemplateKHR = Handle; +using Pipeline = Handle; +using PipelineLayout = Handle; +using QueryPool = Handle; +using RenderPass = Handle; +using Sampler = Handle; +using SurfaceKHR = Handle; + +using DescriptorSets = PoolAllocations; +using CommandBuffers = PoolAllocations; + +/// Vulkan instance owning handle. +class Instance : public Handle { + using Handle::Handle; + +public: + /// Creates a Vulkan instance. Use "operator bool" for error handling. + static Instance Create(u32 version, Span layers, Span extensions, + InstanceDispatch& dispatch) noexcept; + + /// Enumerates physical devices. + /// @return Physical devices and an empty handle on failure. + std::optional> EnumeratePhysicalDevices(); + + /// Tries to create a debug callback messenger. Returns an empty handle on failure. + DebugCallback TryCreateDebugCallback(PFN_vkDebugUtilsMessengerCallbackEXT callback) noexcept; +}; + +class Queue { +public: + /// Construct an empty queue handle. + constexpr Queue() noexcept = default; + + /// Construct a queue handle. + constexpr Queue(VkQueue queue_, const DeviceDispatch& dld_) noexcept + : queue{queue_}, dld{&dld_} {} + + VkResult Submit(Span submit_infos, + VkFence fence = VK_NULL_HANDLE) const noexcept { + return dld->vkQueueSubmit(queue, submit_infos.size(), submit_infos.data(), fence); + } + + VkResult Present(const VkPresentInfoKHR& present_info) const noexcept { + return dld->vkQueuePresentKHR(queue, &present_info); + } + +private: + VkQueue queue = nullptr; + const DeviceDispatch* dld = nullptr; +}; + +class Buffer : public Handle { + using Handle::Handle; + +public: + /// Attaches a memory allocation. + void BindMemory(VkDeviceMemory memory, VkDeviceSize offset) const; + + /// Set object name. 
+ void SetObjectNameEXT(const char* name) const; +}; + +class BufferView : public Handle { + using Handle::Handle; + +public: + /// Set object name. + void SetObjectNameEXT(const char* name) const; +}; + +class Image : public Handle { + using Handle::Handle; + +public: + /// Attaches a memory allocation. + void BindMemory(VkDeviceMemory memory, VkDeviceSize offset) const; + + /// Set object name. + void SetObjectNameEXT(const char* name) const; +}; + +class ImageView : public Handle { + using Handle::Handle; + +public: + /// Set object name. + void SetObjectNameEXT(const char* name) const; +}; + +class DeviceMemory : public Handle { + using Handle::Handle; + +public: + /// Set object name. + void SetObjectNameEXT(const char* name) const; + + u8* Map(VkDeviceSize offset, VkDeviceSize size) const { + void* data; + Check(dld->vkMapMemory(owner, handle, offset, size, 0, &data)); + return static_cast(data); + } + + void Unmap() const noexcept { + dld->vkUnmapMemory(owner, handle); + } +}; + +class Fence : public Handle { + using Handle::Handle; + +public: + /// Set object name. + void SetObjectNameEXT(const char* name) const; + + VkResult Wait(u64 timeout = std::numeric_limits::max()) const noexcept { + return dld->vkWaitForFences(owner, 1, &handle, true, timeout); + } + + VkResult GetStatus() const noexcept { + return dld->vkGetFenceStatus(owner, handle); + } + + void Reset() const { + Check(dld->vkResetFences(owner, 1, &handle)); + } +}; + +class Framebuffer : public Handle { + using Handle::Handle; + +public: + /// Set object name. + void SetObjectNameEXT(const char* name) const; +}; + +class DescriptorPool : public Handle { + using Handle::Handle; + +public: + DescriptorSets Allocate(const VkDescriptorSetAllocateInfo& ai) const; + + /// Set object name. + void SetObjectNameEXT(const char* name) const; +}; + +class CommandPool : public Handle { + using Handle::Handle; + +public: + CommandBuffers Allocate(std::size_t num_buffers, + VkCommandBufferLevel level = VK_COMMAND_BUFFER_LEVEL_PRIMARY) const; + + /// Set object name. + void SetObjectNameEXT(const char* name) const; +}; + +class SwapchainKHR : public Handle { + using Handle::Handle; + +public: + std::vector GetImages() const; +}; + +class Event : public Handle { + using Handle::Handle; + +public: + /// Set object name. + void SetObjectNameEXT(const char* name) const; + + VkResult GetStatus() const noexcept { + return dld->vkGetEventStatus(owner, handle); + } +}; + +class ShaderModule : public Handle { + using Handle::Handle; + +public: + /// Set object name. + void SetObjectNameEXT(const char* name) const; +}; + +class Semaphore : public Handle { + using Handle::Handle; + +public: + /// Set object name. + void SetObjectNameEXT(const char* name) const; + + [[nodiscard]] u64 GetCounter() const { + u64 value; + Check(dld->vkGetSemaphoreCounterValueKHR(owner, handle, &value)); + return value; + } + + /** + * Waits for a timeline semaphore on the host. 
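+     * Provided by VK_KHR_timeline_semaphore through vkWaitSemaphoresKHR.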
+class Semaphore : public Handle<VkSemaphore, VkDevice, DeviceDispatch> {
+    using Handle<VkSemaphore, VkDevice, DeviceDispatch>::Handle;
+
+public:
+    /// Set object name.
+    void SetObjectNameEXT(const char* name) const;
+
+    [[nodiscard]] u64 GetCounter() const {
+        u64 value;
+        Check(dld->vkGetSemaphoreCounterValueKHR(owner, handle, &value));
+        return value;
+    }
+
+    /**
+     * Waits for a timeline semaphore on the host.
+     *
+     * @param value   Value to wait for
+     * @param timeout Time in nanoseconds until the wait times out
+     * @return True on successful wait, false on timeout
+     */
+    bool Wait(u64 value, u64 timeout = std::numeric_limits<u64>::max()) const {
+        const VkSemaphoreWaitInfoKHR wait_info{
+            .sType = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR,
+            .pNext = nullptr,
+            .flags = 0,
+            .semaphoreCount = 1,
+            .pSemaphores = &handle,
+            .pValues = &value,
+        };
+        const VkResult result = dld->vkWaitSemaphoresKHR(owner, &wait_info, timeout);
+        switch (result) {
+        case VK_SUCCESS:
+            return true;
+        case VK_TIMEOUT:
+            return false;
+        default:
+            throw Exception(result);
+        }
+    }
+};
+
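// [Editorial sketch: not part of the upstream patch.] Host-side wait on the timeline
// Semaphore wrapper above. Wait() returns false only on VK_TIMEOUT and throws
// vk::Exception for real errors, so callers can keep the control flow simple. The tick
// value, the 100 ms timeout and the function name are illustrative.
#include "video_core/vulkan_common/vulkan_wrapper.h"

namespace {
bool WaitForTickSketch(const Vulkan::vk::Semaphore& semaphore, u64 tick) {
    if (semaphore.GetCounter() >= tick) {
        return true; // Already signaled, no need to block
    }
    return semaphore.Wait(tick, 100'000'000); // Timeout is expressed in nanoseconds
}
} // namespace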
+class Device : public Handle<VkDevice, NoOwner, DeviceDispatch> {
+    using Handle<VkDevice, NoOwner, DeviceDispatch>::Handle;
+
+public:
+    static Device Create(VkPhysicalDevice physical_device, Span<VkDeviceQueueCreateInfo> queues_ci,
+                         Span<const char*> enabled_extensions, const void* next,
+                         DeviceDispatch& dispatch) noexcept;
+
+    Queue GetQueue(u32 family_index) const noexcept;
+
+    Buffer CreateBuffer(const VkBufferCreateInfo& ci) const;
+
+    BufferView CreateBufferView(const VkBufferViewCreateInfo& ci) const;
+
+    Image CreateImage(const VkImageCreateInfo& ci) const;
+
+    ImageView CreateImageView(const VkImageViewCreateInfo& ci) const;
+
+    Semaphore CreateSemaphore() const;
+
+    Semaphore CreateSemaphore(const VkSemaphoreCreateInfo& ci) const;
+
+    Fence CreateFence(const VkFenceCreateInfo& ci) const;
+
+    DescriptorPool CreateDescriptorPool(const VkDescriptorPoolCreateInfo& ci) const;
+
+    RenderPass CreateRenderPass(const VkRenderPassCreateInfo& ci) const;
+
+    DescriptorSetLayout CreateDescriptorSetLayout(const VkDescriptorSetLayoutCreateInfo& ci) const;
+
+    PipelineLayout CreatePipelineLayout(const VkPipelineLayoutCreateInfo& ci) const;
+
+    Pipeline CreateGraphicsPipeline(const VkGraphicsPipelineCreateInfo& ci) const;
+
+    Pipeline CreateComputePipeline(const VkComputePipelineCreateInfo& ci) const;
+
+    Sampler CreateSampler(const VkSamplerCreateInfo& ci) const;
+
+    Framebuffer CreateFramebuffer(const VkFramebufferCreateInfo& ci) const;
+
+    CommandPool CreateCommandPool(const VkCommandPoolCreateInfo& ci) const;
+
+    DescriptorUpdateTemplateKHR CreateDescriptorUpdateTemplateKHR(
+        const VkDescriptorUpdateTemplateCreateInfoKHR& ci) const;
+
+    QueryPool CreateQueryPool(const VkQueryPoolCreateInfo& ci) const;
+
+    ShaderModule CreateShaderModule(const VkShaderModuleCreateInfo& ci) const;
+
+    Event CreateEvent() const;
+
+    SwapchainKHR CreateSwapchainKHR(const VkSwapchainCreateInfoKHR& ci) const;
+
+    DeviceMemory TryAllocateMemory(const VkMemoryAllocateInfo& ai) const noexcept;
+
+    DeviceMemory AllocateMemory(const VkMemoryAllocateInfo& ai) const;
+
+    VkMemoryRequirements GetBufferMemoryRequirements(VkBuffer buffer) const noexcept;
+
+    VkMemoryRequirements GetImageMemoryRequirements(VkImage image) const noexcept;
+
+    void UpdateDescriptorSets(Span<VkWriteDescriptorSet> writes,
+                              Span<VkCopyDescriptorSet> copies) const noexcept;
+
+    void UpdateDescriptorSet(VkDescriptorSet set, VkDescriptorUpdateTemplateKHR update_template,
+                             const void* data) const noexcept {
+        dld->vkUpdateDescriptorSetWithTemplateKHR(handle, set, update_template, data);
+    }
+
+    VkResult AcquireNextImageKHR(VkSwapchainKHR swapchain, u64 timeout, VkSemaphore semaphore,
+                                 VkFence fence, u32* image_index) const noexcept {
+        return dld->vkAcquireNextImageKHR(handle, swapchain, timeout, semaphore, fence,
+                                          image_index);
+    }
+
+    VkResult WaitIdle() const noexcept {
+        return dld->vkDeviceWaitIdle(handle);
+    }
+
+    void ResetQueryPoolEXT(VkQueryPool query_pool, u32 first, u32 count) const noexcept {
+        dld->vkResetQueryPoolEXT(handle, query_pool, first, count);
+    }
+
+    VkResult GetQueryResults(VkQueryPool query_pool, u32 first, u32 count, std::size_t data_size,
+                             void* data, VkDeviceSize stride,
+                             VkQueryResultFlags flags) const noexcept {
+        return dld->vkGetQueryPoolResults(handle, query_pool, first, count, data_size, data,
+                                          stride, flags);
+    }
+};
+
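// [Editorial sketch: not part of the upstream patch.] The usual create/query/allocate/bind
// sequence with the Device wrapper above. The buffer size, usage flags, memoryTypeIndex
// and function name are illustrative; real code selects a memory type from
// PhysicalDevice::GetMemoryProperties().
#include "video_core/vulkan_common/vulkan_wrapper.h"

namespace {
void CreateStagingBufferSketch(const Vulkan::vk::Device& device) {
    Vulkan::vk::Buffer buffer = device.CreateBuffer({
        .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
        .pNext = nullptr,
        .flags = 0,
        .size = 64 * 1024,
        .usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
        .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
        .queueFamilyIndexCount = 0,
        .pQueueFamilyIndices = nullptr,
    });
    const VkMemoryRequirements requirements = device.GetBufferMemoryRequirements(*buffer);
    // AllocateMemory throws vk::Exception on failure; TryAllocateMemory returns an empty
    // handle instead and lets the caller fall back to another memory type.
    Vulkan::vk::DeviceMemory memory = device.AllocateMemory({
        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .pNext = nullptr,
        .allocationSize = requirements.size,
        .memoryTypeIndex = 0, // Illustrative only
    });
    buffer.BindMemory(*memory, 0);
    // Both RAII handles are destroyed on return; real code keeps them alive together.
}
} // namespace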
+class PhysicalDevice {
+public:
+    constexpr PhysicalDevice() noexcept = default;
+
+    constexpr PhysicalDevice(VkPhysicalDevice physical_device_,
+                             const InstanceDispatch& dld_) noexcept
+        : physical_device{physical_device_}, dld{&dld_} {}
+
+    constexpr operator VkPhysicalDevice() const noexcept {
+        return physical_device;
+    }
+
+    VkPhysicalDeviceProperties GetProperties() const noexcept;
+
+    void GetProperties2KHR(VkPhysicalDeviceProperties2KHR&) const noexcept;
+
+    VkPhysicalDeviceFeatures GetFeatures() const noexcept;
+
+    void GetFeatures2KHR(VkPhysicalDeviceFeatures2KHR&) const noexcept;
+
+    VkFormatProperties GetFormatProperties(VkFormat) const noexcept;
+
+    std::vector<VkExtensionProperties> EnumerateDeviceExtensionProperties() const;
+
+    std::vector<VkQueueFamilyProperties> GetQueueFamilyProperties() const;
+
+    bool GetSurfaceSupportKHR(u32 queue_family_index, VkSurfaceKHR) const;
+
+    VkSurfaceCapabilitiesKHR GetSurfaceCapabilitiesKHR(VkSurfaceKHR) const;
+
+    std::vector<VkSurfaceFormatKHR> GetSurfaceFormatsKHR(VkSurfaceKHR) const;
+
+    std::vector<VkPresentModeKHR> GetSurfacePresentModesKHR(VkSurfaceKHR) const;
+
+    VkPhysicalDeviceMemoryProperties GetMemoryProperties() const noexcept;
+
+private:
+    VkPhysicalDevice physical_device = nullptr;
+    const InstanceDispatch* dld = nullptr;
+};
+
+class CommandBuffer {
+public:
+    CommandBuffer() noexcept = default;
+
+    explicit CommandBuffer(VkCommandBuffer handle_, const DeviceDispatch& dld_) noexcept
+        : handle{handle_}, dld{&dld_} {}
+
+    const VkCommandBuffer* address() const noexcept {
+        return &handle;
+    }
+
+    void Begin(const VkCommandBufferBeginInfo& begin_info) const {
+        Check(dld->vkBeginCommandBuffer(handle, &begin_info));
+    }
+
+    void End() const {
+        Check(dld->vkEndCommandBuffer(handle));
+    }
+
+    void BeginRenderPass(const VkRenderPassBeginInfo& renderpass_bi,
+                         VkSubpassContents contents) const noexcept {
+        dld->vkCmdBeginRenderPass(handle, &renderpass_bi, contents);
+    }
+
+    void EndRenderPass() const noexcept {
+        dld->vkCmdEndRenderPass(handle);
+    }
+
+    void BeginQuery(VkQueryPool query_pool, u32 query, VkQueryControlFlags flags) const noexcept {
+        dld->vkCmdBeginQuery(handle, query_pool, query, flags);
+    }
+
+    void EndQuery(VkQueryPool query_pool, u32 query) const noexcept {
+        dld->vkCmdEndQuery(handle, query_pool, query);
+    }
+
+    void BindDescriptorSets(VkPipelineBindPoint bind_point, VkPipelineLayout layout, u32 first,
+                            Span<VkDescriptorSet> sets, Span<u32> dynamic_offsets) const noexcept {
+        dld->vkCmdBindDescriptorSets(handle, bind_point, layout, first, sets.size(), sets.data(),
+                                     dynamic_offsets.size(), dynamic_offsets.data());
+    }
+
+    void BindPipeline(VkPipelineBindPoint bind_point, VkPipeline pipeline) const noexcept {
+        dld->vkCmdBindPipeline(handle, bind_point, pipeline);
+    }
+
+    void BindIndexBuffer(VkBuffer buffer, VkDeviceSize offset,
+                         VkIndexType index_type) const noexcept {
+        dld->vkCmdBindIndexBuffer(handle, buffer, offset, index_type);
+    }
+
+    void BindVertexBuffers(u32 first, u32 count, const VkBuffer* buffers,
+                           const VkDeviceSize* offsets) const noexcept {
+        dld->vkCmdBindVertexBuffers(handle, first, count, buffers, offsets);
+    }
+
+    void BindVertexBuffer(u32 binding, VkBuffer buffer, VkDeviceSize offset) const noexcept {
+        BindVertexBuffers(binding, 1, &buffer, &offset);
+    }
+
+    void Draw(u32 vertex_count, u32 instance_count, u32 first_vertex,
+              u32 first_instance) const noexcept {
+        dld->vkCmdDraw(handle, vertex_count, instance_count, first_vertex, first_instance);
+    }
+
+    void DrawIndexed(u32 index_count, u32 instance_count, u32 first_index, u32 vertex_offset,
+                     u32 first_instance) const noexcept {
+        dld->vkCmdDrawIndexed(handle, index_count, instance_count, first_index, vertex_offset,
+                              first_instance);
+    }
+
+    void ClearAttachments(Span<VkClearAttachment> attachments,
+                          Span<VkClearRect> rects) const noexcept {
+        dld->vkCmdClearAttachments(handle, attachments.size(), attachments.data(), rects.size(),
+                                   rects.data());
+    }
+
+    void BlitImage(VkImage src_image, VkImageLayout src_layout, VkImage dst_image,
+                   VkImageLayout dst_layout, Span<VkImageBlit> regions,
+                   VkFilter filter) const noexcept {
+        dld->vkCmdBlitImage(handle, src_image, src_layout, dst_image, dst_layout, regions.size(),
+                            regions.data(), filter);
+    }
+
+    void ResolveImage(VkImage src_image, VkImageLayout src_layout, VkImage dst_image,
+                      VkImageLayout dst_layout, Span<VkImageResolve> regions) {
+        dld->vkCmdResolveImage(handle, src_image, src_layout, dst_image, dst_layout,
+                               regions.size(), regions.data());
+    }
+
+    void Dispatch(u32 x, u32 y, u32 z) const noexcept {
+        dld->vkCmdDispatch(handle, x, y, z);
+    }
+
+    void PipelineBarrier(VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask,
+                         VkDependencyFlags dependency_flags, Span<VkMemoryBarrier> memory_barriers,
+                         Span<VkBufferMemoryBarrier> buffer_barriers,
+                         Span<VkImageMemoryBarrier> image_barriers) const noexcept {
+        dld->vkCmdPipelineBarrier(handle, src_stage_mask, dst_stage_mask, dependency_flags,
+                                  memory_barriers.size(), memory_barriers.data(),
+                                  buffer_barriers.size(), buffer_barriers.data(),
+                                  image_barriers.size(), image_barriers.data());
+    }
+
+    void PipelineBarrier(VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask,
+                         VkDependencyFlags dependency_flags = 0) const noexcept {
+        PipelineBarrier(src_stage_mask, dst_stage_mask, dependency_flags, {}, {}, {});
+    }
+
+    void PipelineBarrier(VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask,
+                         VkDependencyFlags dependency_flags,
+                         const VkBufferMemoryBarrier& buffer_barrier) const noexcept {
+        PipelineBarrier(src_stage_mask, dst_stage_mask, dependency_flags, {}, buffer_barrier, {});
+    }
+
+    void PipelineBarrier(VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask,
+                         VkDependencyFlags dependency_flags,
+                         const VkImageMemoryBarrier& image_barrier) const noexcept {
+        PipelineBarrier(src_stage_mask, dst_stage_mask, dependency_flags, {}, {}, image_barrier);
+    }
+
+    void CopyBufferToImage(VkBuffer src_buffer, VkImage dst_image, VkImageLayout dst_image_layout,
+                           Span<VkBufferImageCopy> regions) const noexcept {
+        dld->vkCmdCopyBufferToImage(handle, src_buffer, dst_image, dst_image_layout,
+                                    regions.size(), regions.data());
+    }
+
+    void CopyBuffer(VkBuffer src_buffer, VkBuffer dst_buffer,
+                    Span<VkBufferCopy> regions) const noexcept {
+        dld->vkCmdCopyBuffer(handle, src_buffer, dst_buffer, regions.size(), regions.data());
+    }
+
+    void CopyImage(VkImage src_image, VkImageLayout src_layout, VkImage dst_image,
+                   VkImageLayout dst_layout, Span<VkImageCopy> regions) const noexcept {
+        dld->vkCmdCopyImage(handle, src_image, src_layout, dst_image, dst_layout, regions.size(),
+                            regions.data());
+    }
+
+    void CopyImageToBuffer(VkImage src_image, VkImageLayout src_layout, VkBuffer dst_buffer,
+                           Span<VkBufferImageCopy> regions) const noexcept {
+        dld->vkCmdCopyImageToBuffer(handle, src_image, src_layout, dst_buffer, regions.size(),
+                                    regions.data());
+    }
+
+    void FillBuffer(VkBuffer dst_buffer, VkDeviceSize dst_offset, VkDeviceSize size,
+                    u32 data) const noexcept {
+        dld->vkCmdFillBuffer(handle, dst_buffer, dst_offset, size, data);
+    }
+
+    void PushConstants(VkPipelineLayout layout, VkShaderStageFlags flags, u32 offset, u32 size,
+                       const void* values) const noexcept {
+        dld->vkCmdPushConstants(handle, layout, flags, offset, size, values);
+    }
+
+    template <typename T>
+    void PushConstants(VkPipelineLayout layout, VkShaderStageFlags flags,
+                       const T& data) const noexcept {
+        static_assert(std::is_trivially_copyable_v<T>, "<T> is not trivially copyable");
+        dld->vkCmdPushConstants(handle, layout, flags, 0, static_cast<u32>(sizeof(T)), &data);
+    }
+
+    void SetViewport(u32 first, Span<VkViewport> viewports) const noexcept {
+        dld->vkCmdSetViewport(handle, first, viewports.size(), viewports.data());
+    }
+
+    void SetScissor(u32 first, Span<VkRect2D> scissors) const noexcept {
+        dld->vkCmdSetScissor(handle, first, scissors.size(), scissors.data());
+    }
+
+    void SetBlendConstants(const float blend_constants[4]) const noexcept {
+        dld->vkCmdSetBlendConstants(handle, blend_constants);
+    }
+
+    void SetStencilCompareMask(VkStencilFaceFlags face_mask, u32 compare_mask) const noexcept {
+        dld->vkCmdSetStencilCompareMask(handle, face_mask, compare_mask);
+    }
+
+    void SetStencilReference(VkStencilFaceFlags face_mask, u32 reference) const noexcept {
+        dld->vkCmdSetStencilReference(handle, face_mask, reference);
+    }
+
+    void SetStencilWriteMask(VkStencilFaceFlags face_mask, u32 write_mask) const noexcept {
+        dld->vkCmdSetStencilWriteMask(handle, face_mask, write_mask);
+    }
+
+    void SetDepthBias(float constant_factor, float clamp, float slope_factor) const noexcept {
+        dld->vkCmdSetDepthBias(handle, constant_factor, clamp, slope_factor);
+    }
+
+    void SetDepthBounds(float min_depth_bounds, float max_depth_bounds) const noexcept {
+        dld->vkCmdSetDepthBounds(handle, min_depth_bounds, max_depth_bounds);
+    }
+
+    void SetEvent(VkEvent event, VkPipelineStageFlags stage_flags) const noexcept {
+        dld->vkCmdSetEvent(handle, event, stage_flags);
+    }
+
+    void WaitEvents(Span<VkEvent> events, VkPipelineStageFlags src_stage_mask,
+                    VkPipelineStageFlags dst_stage_mask, Span<VkMemoryBarrier> memory_barriers,
+                    Span<VkBufferMemoryBarrier> buffer_barriers,
+                    Span<VkImageMemoryBarrier> image_barriers) const noexcept {
+        dld->vkCmdWaitEvents(handle, events.size(), events.data(), src_stage_mask, dst_stage_mask,
+                             memory_barriers.size(), memory_barriers.data(),
+                             buffer_barriers.size(), buffer_barriers.data(),
+                             image_barriers.size(), image_barriers.data());
+    }
+
+    void BindVertexBuffers2EXT(u32 first_binding, u32 binding_count, const VkBuffer* buffers,
+                               const VkDeviceSize* offsets, const VkDeviceSize* sizes,
+                               const VkDeviceSize* strides) const noexcept {
+        dld->vkCmdBindVertexBuffers2EXT(handle, first_binding, binding_count, buffers, offsets,
+                                        sizes, strides);
+    }
+
+    void SetCullModeEXT(VkCullModeFlags cull_mode) const noexcept {
+        dld->vkCmdSetCullModeEXT(handle, cull_mode);
+    }
+
+    void SetDepthBoundsTestEnableEXT(bool enable) const noexcept {
+        dld->vkCmdSetDepthBoundsTestEnableEXT(handle, enable ? VK_TRUE : VK_FALSE);
+    }
+
+    void SetDepthCompareOpEXT(VkCompareOp compare_op) const noexcept {
+        dld->vkCmdSetDepthCompareOpEXT(handle, compare_op);
+    }
+
+    void SetDepthTestEnableEXT(bool enable) const noexcept {
+        dld->vkCmdSetDepthTestEnableEXT(handle, enable ? VK_TRUE : VK_FALSE);
+    }
+
+    void SetDepthWriteEnableEXT(bool enable) const noexcept {
+        dld->vkCmdSetDepthWriteEnableEXT(handle, enable ? VK_TRUE : VK_FALSE);
+    }
+
+    void SetFrontFaceEXT(VkFrontFace front_face) const noexcept {
+        dld->vkCmdSetFrontFaceEXT(handle, front_face);
+    }
+
+    void SetPrimitiveTopologyEXT(VkPrimitiveTopology primitive_topology) const noexcept {
+        dld->vkCmdSetPrimitiveTopologyEXT(handle, primitive_topology);
+    }
+
+    void SetStencilOpEXT(VkStencilFaceFlags face_mask, VkStencilOp fail_op, VkStencilOp pass_op,
+                         VkStencilOp depth_fail_op, VkCompareOp compare_op) const noexcept {
+        dld->vkCmdSetStencilOpEXT(handle, face_mask, fail_op, pass_op, depth_fail_op, compare_op);
+    }
+
+    void SetStencilTestEnableEXT(bool enable) const noexcept {
+        dld->vkCmdSetStencilTestEnableEXT(handle, enable ? VK_TRUE : VK_FALSE);
+    }
+
+    void BindTransformFeedbackBuffersEXT(u32 first, u32 count, const VkBuffer* buffers,
+                                         const VkDeviceSize* offsets,
+                                         const VkDeviceSize* sizes) const noexcept {
+        dld->vkCmdBindTransformFeedbackBuffersEXT(handle, first, count, buffers, offsets, sizes);
+    }
+
+    void BeginTransformFeedbackEXT(u32 first_counter_buffer, u32 counter_buffers_count,
+                                   const VkBuffer* counter_buffers,
+                                   const VkDeviceSize* counter_buffer_offsets) const noexcept {
+        dld->vkCmdBeginTransformFeedbackEXT(handle, first_counter_buffer, counter_buffers_count,
+                                            counter_buffers, counter_buffer_offsets);
+    }
+
+    void EndTransformFeedbackEXT(u32 first_counter_buffer, u32 counter_buffers_count,
+                                 const VkBuffer* counter_buffers,
+                                 const VkDeviceSize* counter_buffer_offsets) const noexcept {
+        dld->vkCmdEndTransformFeedbackEXT(handle, first_counter_buffer, counter_buffers_count,
+                                          counter_buffers, counter_buffer_offsets);
+    }
+
+    void BeginDebugUtilsLabelEXT(const char* label, std::span<float, 4> color) const noexcept {
+        const VkDebugUtilsLabelEXT label_info{
+            .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT,
+            .pNext = nullptr,
+            .pLabelName = label,
+            .color{color[0], color[1], color[2], color[3]},
+        };
+        dld->vkCmdBeginDebugUtilsLabelEXT(handle, &label_info);
+    }
+
+    void EndDebugUtilsLabelEXT() const noexcept {
+        dld->vkCmdEndDebugUtilsLabelEXT(handle);
+    }
+
+private:
+    VkCommandBuffer handle;
+    const DeviceDispatch* dld;
+};
+
+u32 AvailableVersion(const InstanceDispatch& dld) noexcept;
+
+std::optional<std::vector<VkExtensionProperties>> EnumerateInstanceExtensionProperties(
+    const InstanceDispatch& dld);
+
+std::optional<std::vector<VkLayerProperties>> EnumerateInstanceLayerProperties(
+    const InstanceDispatch& dld);
+
+} // namespace Vulkan::vk
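// [Editorial sketch: not part of the upstream patch.] End-to-end flavor of the wrapper:
// allocate a primary command buffer from a pool, record a small labeled copy, and submit
// it with a fence. The pool, queue, fence and buffers are assumed to exist already; the
// copy size, label color and function name are illustrative.
#include <array>
#include "video_core/vulkan_common/vulkan_wrapper.h"

namespace {
void RecordAndSubmitSketch(const Vulkan::vk::DeviceDispatch& dld,
                           const Vulkan::vk::CommandPool& pool, const Vulkan::vk::Queue& queue,
                           const Vulkan::vk::Fence& fence, VkBuffer src, VkBuffer dst) {
    // PoolAllocations owns the whole array; operator[] hands out raw handles to wrap.
    const Vulkan::vk::CommandBuffers cmdbufs = pool.Allocate(1);
    const Vulkan::vk::CommandBuffer cmdbuf(cmdbufs[0], dld);

    cmdbuf.Begin({
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
        .pNext = nullptr,
        .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
        .pInheritanceInfo = nullptr,
    });
    std::array<float, 4> label_color{1.0f, 1.0f, 1.0f, 1.0f};
    cmdbuf.BeginDebugUtilsLabelEXT("Example copy", label_color);
    const VkBufferCopy copy{.srcOffset = 0, .dstOffset = 0, .size = 64};
    cmdbuf.CopyBuffer(src, dst, copy);
    cmdbuf.EndDebugUtilsLabelEXT();
    cmdbuf.End();

    const VkSubmitInfo submit_info{
        .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
        .pNext = nullptr,
        .waitSemaphoreCount = 0,
        .pWaitSemaphores = nullptr,
        .pWaitDstStageMask = nullptr,
        .commandBufferCount = 1,
        .pCommandBuffers = cmdbuf.address(),
        .signalSemaphoreCount = 0,
        .pSignalSemaphores = nullptr,
    };
    // Queue::Submit forwards the raw VkResult so callers decide how to handle device loss.
    if (queue.Submit(submit_info, *fence) == VK_SUCCESS) {
        (void)fence.Wait();
    }
}
} // namespace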