From 0e6a4a2226d69767e56915b1d83e15c970d8b14c Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Wed, 16 Oct 2024 17:04:26 -0400 Subject: [PATCH 01/70] Ported vk bootstrap for test apps --- .gitmodules | 3 + CMakeLists.txt | 2 + external/SDL | 1 + test/CMakeLists.txt | 31 + test/test_apps/CMakeLists.txt | 29 + test/test_apps/common/test_app_base.cpp | 2210 ++++++ test/test_apps/common/test_app_base.h | 1026 +++ test/test_apps/common/test_app_dispatch.h | 8098 +++++++++++++++++++++ test/test_apps/triangle/CMakeLists.txt | 70 + test/test_apps/triangle/shaders/frag.spv | Bin 0 -> 608 bytes test/test_apps/triangle/shaders/vert.spv | Bin 0 -> 1540 bytes test/test_apps/triangle/triangle.cpp | 660 ++ 12 files changed, 12130 insertions(+) create mode 160000 external/SDL create mode 100644 test/CMakeLists.txt create mode 100644 test/test_apps/CMakeLists.txt create mode 100644 test/test_apps/common/test_app_base.cpp create mode 100644 test/test_apps/common/test_app_base.h create mode 100644 test/test_apps/common/test_app_dispatch.h create mode 100644 test/test_apps/triangle/CMakeLists.txt create mode 100644 test/test_apps/triangle/shaders/frag.spv create mode 100644 test/test_apps/triangle/shaders/vert.spv create mode 100644 test/test_apps/triangle/triangle.cpp diff --git a/.gitmodules b/.gitmodules index b2ee74512d..af8a56b759 100644 --- a/.gitmodules +++ b/.gitmodules @@ -7,3 +7,6 @@ [submodule "external/SPIRV-Reflect"] path = external/SPIRV-Reflect url = https://github.com/KhronosGroup/SPIRV-Reflect.git +[submodule "external/SDL"] + path = external/SDL + url = https://github.com/libsdl-org/SDL.git diff --git a/CMakeLists.txt b/CMakeLists.txt index 4cee2ac567..71e372bae2 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -354,12 +354,14 @@ if (${RUN_TESTS}) target_include_directories(catch2 INTERFACE external) endif() +add_subdirectory(external/SDL) add_subdirectory(external/nlohmann) add_subdirectory(framework) if(NOT BUILD_STATIC) add_subdirectory(layer) endif() add_subdirectory(tools) +add_subdirectory(test) if (${RUN_TESTS}) add_test_package_file(${CMAKE_CURRENT_LIST_DIR}/scripts/build.py) diff --git a/external/SDL b/external/SDL new file mode 160000 index 0000000000..3ebfdb04be --- /dev/null +++ b/external/SDL @@ -0,0 +1 @@ +Subproject commit 3ebfdb04be8d543321524113951f817e0e2c56c4 diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt new file mode 100644 index 0000000000..b6c5201c71 --- /dev/null +++ b/test/CMakeLists.txt @@ -0,0 +1,31 @@ +############################################################################### +# Copyright (c) 2018-2020 LunarG, Inc. +# Copyright (c) 2020-2023 Advanced Micro Devices, Inc. +# All rights reserved +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# Author: LunarG Team +# Author: AMD Developer Tools Team +# Description: CMake script for gfxrecon tests +############################################################################### + +add_subdirectory(test_apps) + +# TODO: runner goes here \ No newline at end of file diff --git a/test/test_apps/CMakeLists.txt b/test/test_apps/CMakeLists.txt new file mode 100644 index 0000000000..e99f0396ad --- /dev/null +++ b/test/test_apps/CMakeLists.txt @@ -0,0 +1,29 @@ +############################################################################### +# Copyright (c) 2018-2020 LunarG, Inc. +# Copyright (c) 2020-2023 Advanced Micro Devices, Inc. +# All rights reserved +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# Author: LunarG Team +# Author: AMD Developer Tools Team +# Description: CMake script for test apps +############################################################################### + +add_subdirectory(triangle) diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp new file mode 100644 index 0000000000..1eec42daf5 --- /dev/null +++ b/test/test_apps/common/test_app_base.cpp @@ -0,0 +1,2210 @@ +/* +** Copyright (c) 2018-2023 Valve Corporation +** Copyright (c) 2018-2024 LunarG, Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and associated documentation files (the "Software"), +** to deal in the Software without restriction, including without limitation +** the rights to use, copy, modify, merge, publish, distribute, sublicense, +** and/or sell copies of the Software, and to permit persons to whom the +** Software is furnished to do so, subject to the following conditions: +** +** The above copyright notice and this permission notice shall be included in +** all copies or substantial portions of the Software. +** +** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +** IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +** AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +** FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +** DEALINGS IN THE SOFTWARE. +*/ + +#include "test_app_base.h" + +#include + +#if defined(_WIN32) +#include +#ifndef NOMINMAX +#define NOMINMAX +#endif +#include +#endif // _WIN32 + +#if defined(__linux__) || defined(__APPLE__) +#include +#endif + +#include +#include + +GFXRECON_BEGIN_NAMESPACE(gfxrecon) + +GFXRECON_BEGIN_NAMESPACE(test) + +GFXRECON_BEGIN_NAMESPACE(detail) + +GenericFeaturesPNextNode::GenericFeaturesPNextNode() { memset(fields, UINT8_MAX, sizeof(VkBool32) * field_capacity); } + +bool GenericFeaturesPNextNode::match(GenericFeaturesPNextNode const& requested, GenericFeaturesPNextNode const& supported) noexcept { + assert(requested.sType == supported.sType && "Non-matching sTypes in features nodes!"); + for (uint32_t i = 0; i < field_capacity; i++) { + if (requested.fields[i] && !supported.fields[i]) return false; + } + return true; +} + +void GenericFeaturesPNextNode::combine(GenericFeaturesPNextNode const& right) noexcept { + assert(sType == right.sType && "Non-matching sTypes in features nodes!"); + for (uint32_t i = 0; i < GenericFeaturesPNextNode::field_capacity; i++) { + fields[i] = fields[i] || right.fields[i]; + } +} + +bool GenericFeatureChain::match_all(GenericFeatureChain const& extension_requested) const noexcept { + // Should only be false if extension_supported was unable to be filled out, due to the + // physical device not supporting vkGetPhysicalDeviceFeatures2 in any capacity. + if (extension_requested.nodes.size() != nodes.size()) { + return false; + } + + for (size_t i = 0; i < nodes.size() && i < nodes.size(); ++i) { + if (!GenericFeaturesPNextNode::match(extension_requested.nodes[i], nodes[i])) return false; + } + return true; +} + +bool GenericFeatureChain::find_and_match(GenericFeatureChain const& extensions_requested) const noexcept { + for (const auto& requested_extension_node : extensions_requested.nodes) { + bool found = false; + for (const auto& supported_node : nodes) { + if (supported_node.sType == requested_extension_node.sType) { + found = true; + if (!GenericFeaturesPNextNode::match(requested_extension_node, supported_node)) return false; + break; + } + } + if (!found) return false; + } + return true; +} + +void GenericFeatureChain::chain_up(VkPhysicalDeviceFeatures2& feats2) noexcept { + detail::GenericFeaturesPNextNode* prev = nullptr; + for (auto& extension : nodes) { + if (prev != nullptr) { + prev->pNext = &extension; + } + prev = &extension; + } + feats2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2; + feats2.pNext = !nodes.empty() ? 
&nodes.at(0) : nullptr; +} + +void GenericFeatureChain::combine(GenericFeatureChain const& right) noexcept { + for (const auto& right_node : right.nodes) { + bool already_contained = false; + for (auto& left_node : nodes) { + if (left_node.sType == right_node.sType) { + left_node.combine(right_node); + already_contained = true; + } + } + if (!already_contained) { + nodes.push_back(right_node); + } + } +} + + +class VulkanFunctions { + private: + std::mutex init_mutex; + +#if defined(__linux__) || defined(__APPLE__) + void* library = nullptr; +#elif defined(_WIN32) + HMODULE library = nullptr; +#endif + + bool load_vulkan_library() { + // Can immediately return if it has already been loaded + if (library) { + return true; + } +#if defined(__linux__) + library = dlopen("libvulkan.so.1", RTLD_NOW | RTLD_LOCAL); + if (!library) library = dlopen("libvulkan.so", RTLD_NOW | RTLD_LOCAL); +#elif defined(__APPLE__) + library = dlopen("libvulkan.dylib", RTLD_NOW | RTLD_LOCAL); + if (!library) library = dlopen("libvulkan.1.dylib", RTLD_NOW | RTLD_LOCAL); + if (!library) library = dlopen("libMoltenVK.dylib", RTLD_NOW | RTLD_LOCAL); +#elif defined(_WIN32) + library = LoadLibrary(TEXT("vulkan-1.dll")); +#else + assert(false && "Unsupported platform"); +#endif + if (!library) return false; + load_func(ptr_vkGetInstanceProcAddr, "vkGetInstanceProcAddr"); + return ptr_vkGetInstanceProcAddr != nullptr; + } + + template void load_func(T& func_dest, const char* func_name) { +#if defined(__linux__) || defined(__APPLE__) + func_dest = reinterpret_cast(dlsym(library, func_name)); +#elif defined(_WIN32) + func_dest = reinterpret_cast(GetProcAddress(library, func_name)); +#endif + } + void close() { +#if defined(__linux__) || defined(__APPLE__) + dlclose(library); +#elif defined(_WIN32) + FreeLibrary(library); +#endif + library = 0; + } + + public: + bool init_vulkan_funcs(PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr = nullptr) { + std::lock_guard lg(init_mutex); + if (fp_vkGetInstanceProcAddr != nullptr) { + ptr_vkGetInstanceProcAddr = fp_vkGetInstanceProcAddr; + } else { + bool ret = load_vulkan_library(); + if (!ret) return false; + } + + fp_vkEnumerateInstanceExtensionProperties = reinterpret_cast( + ptr_vkGetInstanceProcAddr(VK_NULL_HANDLE, "vkEnumerateInstanceExtensionProperties")); + fp_vkEnumerateInstanceLayerProperties = reinterpret_cast( + ptr_vkGetInstanceProcAddr(VK_NULL_HANDLE, "vkEnumerateInstanceLayerProperties")); + fp_vkEnumerateInstanceVersion = reinterpret_cast( + ptr_vkGetInstanceProcAddr(VK_NULL_HANDLE, "vkEnumerateInstanceVersion")); + fp_vkCreateInstance = + reinterpret_cast(ptr_vkGetInstanceProcAddr(VK_NULL_HANDLE, "vkCreateInstance")); + return true; + } + + public: + template void get_inst_proc_addr(T& out_ptr, const char* func_name) { + out_ptr = reinterpret_cast(ptr_vkGetInstanceProcAddr(instance, func_name)); + } + + template void get_device_proc_addr(VkDevice device, T& out_ptr, const char* func_name) { + out_ptr = reinterpret_cast(fp_vkGetDeviceProcAddr(device, func_name)); + } + + PFN_vkGetInstanceProcAddr ptr_vkGetInstanceProcAddr = nullptr; + VkInstance instance = nullptr; + + PFN_vkEnumerateInstanceExtensionProperties fp_vkEnumerateInstanceExtensionProperties = nullptr; + PFN_vkEnumerateInstanceLayerProperties fp_vkEnumerateInstanceLayerProperties = nullptr; + PFN_vkEnumerateInstanceVersion fp_vkEnumerateInstanceVersion = nullptr; + PFN_vkCreateInstance fp_vkCreateInstance = nullptr; + + PFN_vkDestroyInstance fp_vkDestroyInstance = nullptr; + 
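+    // These instance-level pointers are filled in by init_instance_funcs() once the instance has been created.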
PFN_vkCreateDebugUtilsMessengerEXT fp_vkCreateDebugUtilsMessengerEXT = nullptr; + PFN_vkDestroyDebugUtilsMessengerEXT fp_vkDestroyDebugUtilsMessengerEXT = nullptr; + PFN_vkEnumeratePhysicalDevices fp_vkEnumeratePhysicalDevices = nullptr; + PFN_vkGetPhysicalDeviceFeatures fp_vkGetPhysicalDeviceFeatures = nullptr; + PFN_vkGetPhysicalDeviceFeatures2 fp_vkGetPhysicalDeviceFeatures2 = nullptr; + PFN_vkGetPhysicalDeviceFeatures2KHR fp_vkGetPhysicalDeviceFeatures2KHR = nullptr; + PFN_vkGetPhysicalDeviceProperties fp_vkGetPhysicalDeviceProperties = nullptr; + PFN_vkGetPhysicalDeviceQueueFamilyProperties fp_vkGetPhysicalDeviceQueueFamilyProperties = nullptr; + PFN_vkGetPhysicalDeviceMemoryProperties fp_vkGetPhysicalDeviceMemoryProperties = nullptr; + PFN_vkEnumerateDeviceExtensionProperties fp_vkEnumerateDeviceExtensionProperties = nullptr; + + PFN_vkCreateDevice fp_vkCreateDevice = nullptr; + PFN_vkGetDeviceProcAddr fp_vkGetDeviceProcAddr = nullptr; + + PFN_vkDestroySurfaceKHR fp_vkDestroySurfaceKHR = nullptr; + PFN_vkGetPhysicalDeviceSurfaceSupportKHR fp_vkGetPhysicalDeviceSurfaceSupportKHR = nullptr; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR fp_vkGetPhysicalDeviceSurfaceFormatsKHR = nullptr; + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR fp_vkGetPhysicalDeviceSurfacePresentModesKHR = nullptr; + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR = nullptr; + + void init_instance_funcs(VkInstance inst) { + instance = inst; + get_inst_proc_addr(fp_vkDestroyInstance, "vkDestroyInstance"); + get_inst_proc_addr(fp_vkCreateDebugUtilsMessengerEXT, "vkCreateDebugUtilsMessengerEXT"); + get_inst_proc_addr(fp_vkDestroyDebugUtilsMessengerEXT, "vkDestroyDebugUtilsMessengerEXT"); + get_inst_proc_addr(fp_vkEnumeratePhysicalDevices, "vkEnumeratePhysicalDevices"); + + get_inst_proc_addr(fp_vkGetPhysicalDeviceFeatures, "vkGetPhysicalDeviceFeatures"); + get_inst_proc_addr(fp_vkGetPhysicalDeviceFeatures2, "vkGetPhysicalDeviceFeatures2"); + get_inst_proc_addr(fp_vkGetPhysicalDeviceFeatures2KHR, "vkGetPhysicalDeviceFeatures2KHR"); + get_inst_proc_addr(fp_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties"); + get_inst_proc_addr(fp_vkGetPhysicalDeviceQueueFamilyProperties, "vkGetPhysicalDeviceQueueFamilyProperties"); + get_inst_proc_addr(fp_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties"); + get_inst_proc_addr(fp_vkEnumerateDeviceExtensionProperties, "vkEnumerateDeviceExtensionProperties"); + + get_inst_proc_addr(fp_vkCreateDevice, "vkCreateDevice"); + get_inst_proc_addr(fp_vkGetDeviceProcAddr, "vkGetDeviceProcAddr"); + + get_inst_proc_addr(fp_vkDestroySurfaceKHR, "vkDestroySurfaceKHR"); + get_inst_proc_addr(fp_vkGetPhysicalDeviceSurfaceSupportKHR, "vkGetPhysicalDeviceSurfaceSupportKHR"); + get_inst_proc_addr(fp_vkGetPhysicalDeviceSurfaceFormatsKHR, "vkGetPhysicalDeviceSurfaceFormatsKHR"); + get_inst_proc_addr(fp_vkGetPhysicalDeviceSurfacePresentModesKHR, "vkGetPhysicalDeviceSurfacePresentModesKHR"); + get_inst_proc_addr(fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR"); + } +}; + +static VulkanFunctions& vulkan_functions() { + static VulkanFunctions v; + return v; +} + +// Helper for robustly executing the two-call pattern +template auto get_vector(std::vector& out, F&& f, Ts&&... 
ts) -> VkResult { + uint32_t count = 0; + VkResult err; + do { + err = f(ts..., &count, nullptr); + if (err != VK_SUCCESS) { + return err; + }; + out.resize(count); + err = f(ts..., &count, out.data()); + out.resize(count); + } while (err == VK_INCOMPLETE); + return err; +} + +template auto get_vector_noerror(F&& f, Ts&&... ts) -> std::vector { + uint32_t count = 0; + std::vector results; + f(ts..., &count, nullptr); + results.resize(count); + f(ts..., &count, results.data()); + results.resize(count); + return results; +} + +GFXRECON_END_NAMESPACE(detail) + +const char* to_string_message_severity(VkDebugUtilsMessageSeverityFlagBitsEXT s) { + switch (s) { + case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT: + return "VERBOSE"; + case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT: + return "ERROR"; + case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT: + return "WARNING"; + case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT: + return "INFO"; + default: + return "UNKNOWN"; + } +} +const char* to_string_message_type(VkDebugUtilsMessageTypeFlagsEXT s) { + if (s == 7) return "General | Validation | Performance"; + if (s == 6) return "Validation | Performance"; + if (s == 5) return "General | Performance"; + if (s == 4 /*VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT*/) return "Performance"; + if (s == 3) return "General | Validation"; + if (s == 2 /*VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT*/) return "Validation"; + if (s == 1 /*VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT*/) return "General"; + return "Unknown"; +} + +VkResult create_debug_utils_messenger(VkInstance instance, + PFN_vkDebugUtilsMessengerCallbackEXT debug_callback, + VkDebugUtilsMessageSeverityFlagsEXT severity, + VkDebugUtilsMessageTypeFlagsEXT type, + void* user_data_pointer, + VkDebugUtilsMessengerEXT* pDebugMessenger, + VkAllocationCallbacks* allocation_callbacks) { + + if (debug_callback == nullptr) debug_callback = default_debug_callback; + VkDebugUtilsMessengerCreateInfoEXT messengerCreateInfo = {}; + messengerCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT; + messengerCreateInfo.pNext = nullptr; + messengerCreateInfo.messageSeverity = severity; + messengerCreateInfo.messageType = type; + messengerCreateInfo.pfnUserCallback = debug_callback; + messengerCreateInfo.pUserData = user_data_pointer; + + if (detail::vulkan_functions().fp_vkCreateDebugUtilsMessengerEXT != nullptr) { + return detail::vulkan_functions().fp_vkCreateDebugUtilsMessengerEXT( + instance, &messengerCreateInfo, allocation_callbacks, pDebugMessenger); + } else { + return VK_ERROR_EXTENSION_NOT_PRESENT; + } +} + +void destroy_debug_utils_messenger( + VkInstance instance, VkDebugUtilsMessengerEXT debugMessenger, VkAllocationCallbacks* allocation_callbacks) { + + if (detail::vulkan_functions().fp_vkDestroyDebugUtilsMessengerEXT != nullptr) { + detail::vulkan_functions().fp_vkDestroyDebugUtilsMessengerEXT(instance, debugMessenger, allocation_callbacks); + } +} + +GFXRECON_BEGIN_NAMESPACE(detail) + +bool check_layer_supported(std::vector const& available_layers, const char* layer_name) { + if (!layer_name) return false; + for (const auto& layer_properties : available_layers) { + if (strcmp(layer_name, layer_properties.layerName) == 0) { + return true; + } + } + return false; +} + +bool check_layers_supported(std::vector const& available_layers, std::vector const& layer_names) { + bool all_found = true; + for (const auto& layer_name : layer_names) { + bool found = check_layer_supported(available_layers, layer_name); + if 
(!found) all_found = false; + } + return all_found; +} + +bool check_extension_supported(std::vector const& available_extensions, const char* extension_name) { + if (!extension_name) return false; + for (const auto& extension_properties : available_extensions) { + if (strcmp(extension_name, extension_properties.extensionName) == 0) { + return true; + } + } + return false; +} + +bool check_extensions_supported( + std::vector const& available_extensions, std::vector const& extension_names) { + bool all_found = true; + for (const auto& extension_name : extension_names) { + bool found = check_extension_supported(available_extensions, extension_name); + if (!found) all_found = false; + } + return all_found; +} + +template void setup_pNext_chain(T& structure, std::vector const& structs) { + structure.pNext = nullptr; + if (structs.size() <= 0) return; + for (size_t i = 0; i < structs.size() - 1; i++) { + structs.at(i)->pNext = structs.at(i + 1); + } + structure.pNext = structs.at(0); +} +const char* validation_layer_name = "VK_LAYER_KHRONOS_validation"; + +struct InstanceErrorCategory : std::error_category { + const char* name() const noexcept override { return "gfxrecon_test_instance"; } + std::string message(int err) const override { return to_string(static_cast(err)); } +}; +const InstanceErrorCategory instance_error_category; + +struct PhysicalDeviceErrorCategory : std::error_category { + const char* name() const noexcept override { return "gfxrecon_test_physical_device"; } + std::string message(int err) const override { return to_string(static_cast(err)); } +}; +const PhysicalDeviceErrorCategory physical_device_error_category; + +struct QueueErrorCategory : std::error_category { + const char* name() const noexcept override { return "gfxrecon_test_queue"; } + std::string message(int err) const override { return to_string(static_cast(err)); } +}; +const QueueErrorCategory queue_error_category; + +struct DeviceErrorCategory : std::error_category { + const char* name() const noexcept override { return "gfxrecon_test_device"; } + std::string message(int err) const override { return to_string(static_cast(err)); } +}; +const DeviceErrorCategory device_error_category; + +struct SwapchainErrorCategory : std::error_category { + const char* name() const noexcept override { return "vbk_swapchain"; } + std::string message(int err) const override { return to_string(static_cast(err)); } +}; +const SwapchainErrorCategory swapchain_error_category; + +GFXRECON_END_NAMESPACE(detail) + +std::error_code make_error_code(InstanceError instance_error) { + return { static_cast(instance_error), detail::instance_error_category }; +} +std::error_code make_error_code(PhysicalDeviceError physical_device_error) { + return { static_cast(physical_device_error), detail::physical_device_error_category }; +} +std::error_code make_error_code(QueueError queue_error) { + return { static_cast(queue_error), detail::queue_error_category }; +} +std::error_code make_error_code(DeviceError device_error) { + return { static_cast(device_error), detail::device_error_category }; +} +std::error_code make_error_code(SwapchainError swapchain_error) { + return { static_cast(swapchain_error), detail::swapchain_error_category }; +} +#define CASE_TO_STRING(CATEGORY, TYPE) \ + case CATEGORY::TYPE: \ + return #TYPE; + +const char* to_string(InstanceError err) { + switch (err) { + CASE_TO_STRING(InstanceError, vulkan_unavailable) + CASE_TO_STRING(InstanceError, vulkan_version_unavailable) + CASE_TO_STRING(InstanceError, 
vulkan_version_1_1_unavailable) + CASE_TO_STRING(InstanceError, vulkan_version_1_2_unavailable) + CASE_TO_STRING(InstanceError, failed_create_debug_messenger) + CASE_TO_STRING(InstanceError, failed_create_instance) + CASE_TO_STRING(InstanceError, requested_layers_not_present) + CASE_TO_STRING(InstanceError, requested_extensions_not_present) + CASE_TO_STRING(InstanceError, windowing_extensions_not_present) + default: + return ""; + } +} +const char* to_string(PhysicalDeviceError err) { + switch (err) { + CASE_TO_STRING(PhysicalDeviceError, no_surface_provided) + CASE_TO_STRING(PhysicalDeviceError, failed_enumerate_physical_devices) + CASE_TO_STRING(PhysicalDeviceError, no_physical_devices_found) + CASE_TO_STRING(PhysicalDeviceError, no_suitable_device) + default: + return ""; + } +} +const char* to_string(QueueError err) { + switch (err) { + CASE_TO_STRING(QueueError, present_unavailable) + CASE_TO_STRING(QueueError, graphics_unavailable) + CASE_TO_STRING(QueueError, compute_unavailable) + CASE_TO_STRING(QueueError, transfer_unavailable) + CASE_TO_STRING(QueueError, queue_index_out_of_range) + CASE_TO_STRING(QueueError, invalid_queue_family_index) + default: + return ""; + } +} +const char* to_string(DeviceError err) { + switch (err) { + CASE_TO_STRING(DeviceError, failed_create_device) + default: + return ""; + } +} +const char* to_string(SwapchainError err) { + switch (err) { + CASE_TO_STRING(SwapchainError, surface_handle_not_provided) + CASE_TO_STRING(SwapchainError, failed_query_surface_support_details) + CASE_TO_STRING(SwapchainError, failed_create_swapchain) + CASE_TO_STRING(SwapchainError, failed_get_swapchain_images) + CASE_TO_STRING(SwapchainError, failed_create_swapchain_image_views) + CASE_TO_STRING(SwapchainError, required_min_image_count_too_low) + CASE_TO_STRING(SwapchainError, required_usage_not_supported) + default: + return ""; + } +} + +Result SystemInfo::get_system_info() { + if (!detail::vulkan_functions().init_vulkan_funcs(nullptr)) { + return make_error_code(InstanceError::vulkan_unavailable); + } + return SystemInfo(); +} + +Result SystemInfo::get_system_info(PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr) { + // Using externally provided function pointers, assume the loader is available + if (!detail::vulkan_functions().init_vulkan_funcs(fp_vkGetInstanceProcAddr)) { + return make_error_code(InstanceError::vulkan_unavailable); + } + return SystemInfo(); +} + +SystemInfo::SystemInfo() { + auto available_layers_ret = detail::get_vector( + this->available_layers, detail::vulkan_functions().fp_vkEnumerateInstanceLayerProperties); + if (available_layers_ret != VK_SUCCESS) { + this->available_layers.clear(); + } + + for (auto& layer : this->available_layers) + if (strcmp(layer.layerName, detail::validation_layer_name) == 0) validation_layers_available = true; + + auto available_extensions_ret = detail::get_vector( + this->available_extensions, detail::vulkan_functions().fp_vkEnumerateInstanceExtensionProperties, nullptr); + if (available_extensions_ret != VK_SUCCESS) { + this->available_extensions.clear(); + } + + for (auto& ext : this->available_extensions) { + if (strcmp(ext.extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0) { + debug_utils_available = true; + } + } + + for (auto& layer : this->available_layers) { + std::vector layer_extensions; + auto layer_extensions_ret = detail::get_vector( + layer_extensions, detail::vulkan_functions().fp_vkEnumerateInstanceExtensionProperties, layer.layerName); + if (layer_extensions_ret == VK_SUCCESS) { + 
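+            // Merge extensions provided by each layer into the instance-wide list so later availability checks cover them too.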
this->available_extensions.insert( + this->available_extensions.end(), layer_extensions.begin(), layer_extensions.end()); + for (auto& ext : layer_extensions) { + if (strcmp(ext.extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0) { + debug_utils_available = true; + } + } + } + } +} +bool SystemInfo::is_extension_available(const char* extension_name) const { + if (!extension_name) return false; + return detail::check_extension_supported(available_extensions, extension_name); +} +bool SystemInfo::is_layer_available(const char* layer_name) const { + if (!layer_name) return false; + return detail::check_layer_supported(available_layers, layer_name); +} +void destroy_surface(Instance const& instance, VkSurfaceKHR surface) { + if (instance.instance != VK_NULL_HANDLE && surface != VK_NULL_HANDLE) { + detail::vulkan_functions().fp_vkDestroySurfaceKHR(instance.instance, surface, instance.allocation_callbacks); + } +} +void destroy_surface(VkInstance instance, VkSurfaceKHR surface, VkAllocationCallbacks* callbacks) { + if (instance != VK_NULL_HANDLE && surface != VK_NULL_HANDLE) { + detail::vulkan_functions().fp_vkDestroySurfaceKHR(instance, surface, callbacks); + } +} +void destroy_instance(Instance const& instance) { + if (instance.instance != VK_NULL_HANDLE) { + if (instance.debug_messenger != VK_NULL_HANDLE) + destroy_debug_utils_messenger(instance.instance, instance.debug_messenger, instance.allocation_callbacks); + detail::vulkan_functions().fp_vkDestroyInstance(instance.instance, instance.allocation_callbacks); + } +} + +Instance::operator VkInstance() const { return this->instance; } + +InstanceDispatchTable Instance::make_table() const { return { instance, fp_vkGetInstanceProcAddr }; } + +InstanceBuilder::InstanceBuilder(PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr) { + info.fp_vkGetInstanceProcAddr = fp_vkGetInstanceProcAddr; +} +InstanceBuilder::InstanceBuilder() {} + +Result InstanceBuilder::build() const { + + auto sys_info_ret = SystemInfo::get_system_info(info.fp_vkGetInstanceProcAddr); + if (!sys_info_ret) return sys_info_ret.error(); + auto system = sys_info_ret.value(); + + uint32_t instance_version = VKB_VK_API_VERSION_1_0; + + if (info.minimum_instance_version > VKB_VK_API_VERSION_1_0 || info.required_api_version > VKB_VK_API_VERSION_1_0 || + info.desired_api_version > VKB_VK_API_VERSION_1_0) { + PFN_vkEnumerateInstanceVersion pfn_vkEnumerateInstanceVersion = detail::vulkan_functions().fp_vkEnumerateInstanceVersion; + + if (pfn_vkEnumerateInstanceVersion != nullptr) { + VkResult res = pfn_vkEnumerateInstanceVersion(&instance_version); + // Should always return VK_SUCCESS + if (res != VK_SUCCESS && info.required_api_version > 0) + return make_error_code(InstanceError::vulkan_version_unavailable); + } + if (pfn_vkEnumerateInstanceVersion == nullptr || instance_version < info.minimum_instance_version || + (info.minimum_instance_version == 0 && instance_version < info.required_api_version)) { + if (VK_VERSION_MINOR(info.required_api_version) == 2) + return make_error_code(InstanceError::vulkan_version_1_2_unavailable); + else if (VK_VERSION_MINOR(info.required_api_version)) + return make_error_code(InstanceError::vulkan_version_1_1_unavailable); + else + return make_error_code(InstanceError::vulkan_version_unavailable); + } + } + + uint32_t api_version = instance_version < VKB_VK_API_VERSION_1_1 ? 
instance_version : info.required_api_version; + + if (info.desired_api_version > VKB_VK_API_VERSION_1_0 && instance_version >= info.desired_api_version) { + instance_version = info.desired_api_version; + api_version = info.desired_api_version; + } + + VkApplicationInfo app_info = {}; + app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO; + app_info.pNext = nullptr; + app_info.pApplicationName = info.app_name != nullptr ? info.app_name : ""; + app_info.applicationVersion = info.application_version; + app_info.pEngineName = info.engine_name != nullptr ? info.engine_name : ""; + app_info.engineVersion = info.engine_version; + app_info.apiVersion = api_version; + + std::vector extensions; + std::vector layers; + + for (auto& ext : info.extensions) + extensions.push_back(ext); + if (info.debug_callback != nullptr && info.use_debug_messenger && system.debug_utils_available) { + extensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME); + } + bool properties2_ext_enabled = + api_version < VKB_VK_API_VERSION_1_1 && detail::check_extension_supported(system.available_extensions, + VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME); + if (properties2_ext_enabled) { + extensions.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME); + } + +#if defined(VK_KHR_portability_enumeration) + bool portability_enumeration_support = + detail::check_extension_supported(system.available_extensions, VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME); + if (portability_enumeration_support) { + extensions.push_back(VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME); + } +#else + bool portability_enumeration_support = false; +#endif + if (!info.headless_context) { + auto check_add_window_ext = [&](const char* name) -> bool { + if (!detail::check_extension_supported(system.available_extensions, name)) return false; + extensions.push_back(name); + return true; + }; + bool khr_surface_added = check_add_window_ext("VK_KHR_surface"); +#if defined(_WIN32) + bool added_window_exts = check_add_window_ext("VK_KHR_win32_surface"); +#elif defined(__ANDROID__) + bool added_window_exts = check_add_window_ext("VK_KHR_android_surface"); +#elif defined(_DIRECT2DISPLAY) + bool added_window_exts = check_add_window_ext("VK_KHR_display"); +#elif defined(__linux__) + // make sure all three calls to check_add_window_ext, don't allow short circuiting + bool added_window_exts = check_add_window_ext("VK_KHR_xcb_surface"); + added_window_exts = check_add_window_ext("VK_KHR_xlib_surface") || added_window_exts; + added_window_exts = check_add_window_ext("VK_KHR_wayland_surface") || added_window_exts; +#elif defined(__APPLE__) + bool added_window_exts = check_add_window_ext("VK_EXT_metal_surface"); +#endif + if (!khr_surface_added || !added_window_exts) + return make_error_code(InstanceError::windowing_extensions_not_present); + } + bool all_extensions_supported = detail::check_extensions_supported(system.available_extensions, extensions); + if (!all_extensions_supported) { + return make_error_code(InstanceError::requested_extensions_not_present); + } + + for (auto& layer : info.layers) + layers.push_back(layer); + + if (info.enable_validation_layers || (info.request_validation_layers && system.validation_layers_available)) { + layers.push_back(detail::validation_layer_name); + } + bool all_layers_supported = detail::check_layers_supported(system.available_layers, layers); + if (!all_layers_supported) { + return make_error_code(InstanceError::requested_layers_not_present); + } + + std::vector pNext_chain; + + 
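+    // Optional debug-messenger, validation-features, and validation-flags structs are collected here and chained onto VkInstanceCreateInfo::pNext via setup_pNext_chain().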
VkDebugUtilsMessengerCreateInfoEXT messengerCreateInfo = {}; + if (info.use_debug_messenger) { + messengerCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT; + messengerCreateInfo.pNext = nullptr; + messengerCreateInfo.messageSeverity = info.debug_message_severity; + messengerCreateInfo.messageType = info.debug_message_type; + messengerCreateInfo.pfnUserCallback = info.debug_callback; + messengerCreateInfo.pUserData = info.debug_user_data_pointer; + pNext_chain.push_back(reinterpret_cast(&messengerCreateInfo)); + } + + VkValidationFeaturesEXT features{}; + if (info.enabled_validation_features.size() != 0 || info.disabled_validation_features.size()) { + features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT; + features.pNext = nullptr; + features.enabledValidationFeatureCount = static_cast(info.enabled_validation_features.size()); + features.pEnabledValidationFeatures = info.enabled_validation_features.data(); + features.disabledValidationFeatureCount = static_cast(info.disabled_validation_features.size()); + features.pDisabledValidationFeatures = info.disabled_validation_features.data(); + pNext_chain.push_back(reinterpret_cast(&features)); + } + + VkValidationFlagsEXT checks{}; + if (info.disabled_validation_checks.size() != 0) { + checks.sType = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT; + checks.pNext = nullptr; + checks.disabledValidationCheckCount = static_cast(info.disabled_validation_checks.size()); + checks.pDisabledValidationChecks = info.disabled_validation_checks.data(); + pNext_chain.push_back(reinterpret_cast(&checks)); + } + + VkInstanceCreateInfo instance_create_info = {}; + instance_create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO; + detail::setup_pNext_chain(instance_create_info, pNext_chain); +#if !defined(NDEBUG) + for (auto& node : pNext_chain) { + assert(node->sType != VK_STRUCTURE_TYPE_APPLICATION_INFO); + } +#endif + instance_create_info.flags = info.flags; + instance_create_info.pApplicationInfo = &app_info; + instance_create_info.enabledExtensionCount = static_cast(extensions.size()); + instance_create_info.ppEnabledExtensionNames = extensions.data(); + instance_create_info.enabledLayerCount = static_cast(layers.size()); + instance_create_info.ppEnabledLayerNames = layers.data(); +#if defined(VK_KHR_portability_enumeration) + if (portability_enumeration_support) { + instance_create_info.flags |= VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR; + } +#endif + + Instance instance; + VkResult res = + detail::vulkan_functions().fp_vkCreateInstance(&instance_create_info, info.allocation_callbacks, &instance.instance); + if (res != VK_SUCCESS) return Result(InstanceError::failed_create_instance, res); + + detail::vulkan_functions().init_instance_funcs(instance.instance); + + if (info.use_debug_messenger) { + res = create_debug_utils_messenger(instance.instance, + info.debug_callback, + info.debug_message_severity, + info.debug_message_type, + info.debug_user_data_pointer, + &instance.debug_messenger, + info.allocation_callbacks); + if (res != VK_SUCCESS) { + return Result(InstanceError::failed_create_debug_messenger, res); + } + } + + instance.headless = info.headless_context; + instance.properties2_ext_enabled = properties2_ext_enabled; + instance.allocation_callbacks = info.allocation_callbacks; + instance.instance_version = instance_version; + instance.api_version = api_version; + instance.fp_vkGetInstanceProcAddr = detail::vulkan_functions().ptr_vkGetInstanceProcAddr; + instance.fp_vkGetDeviceProcAddr = 
detail::vulkan_functions().fp_vkGetDeviceProcAddr; + return instance; +} + +InstanceBuilder& InstanceBuilder::set_app_name(const char* app_name) { + if (!app_name) return *this; + info.app_name = app_name; + return *this; +} +InstanceBuilder& InstanceBuilder::set_engine_name(const char* engine_name) { + if (!engine_name) return *this; + info.engine_name = engine_name; + return *this; +} +InstanceBuilder& InstanceBuilder::set_app_version(uint32_t app_version) { + info.application_version = app_version; + return *this; +} +InstanceBuilder& InstanceBuilder::set_app_version(uint32_t major, uint32_t minor, uint32_t patch) { + info.application_version = VKB_MAKE_VK_VERSION(0, major, minor, patch); + return *this; +} +InstanceBuilder& InstanceBuilder::set_engine_version(uint32_t engine_version) { + info.engine_version = engine_version; + return *this; +} +InstanceBuilder& InstanceBuilder::set_engine_version(uint32_t major, uint32_t minor, uint32_t patch) { + info.engine_version = VKB_MAKE_VK_VERSION(0, major, minor, patch); + return *this; +} +InstanceBuilder& InstanceBuilder::require_api_version(uint32_t required_api_version) { + info.required_api_version = required_api_version; + return *this; +} +InstanceBuilder& InstanceBuilder::require_api_version(uint32_t major, uint32_t minor, uint32_t patch) { + info.required_api_version = VKB_MAKE_VK_VERSION(0, major, minor, patch); + return *this; +} +InstanceBuilder& InstanceBuilder::set_minimum_instance_version(uint32_t minimum_instance_version) { + info.minimum_instance_version = minimum_instance_version; + return *this; +} +InstanceBuilder& InstanceBuilder::set_minimum_instance_version(uint32_t major, uint32_t minor, uint32_t patch) { + info.minimum_instance_version = VKB_MAKE_VK_VERSION(0, major, minor, patch); + return *this; +} +InstanceBuilder& InstanceBuilder::desire_api_version(uint32_t preferred_vulkan_version) { + info.desired_api_version = preferred_vulkan_version; + return *this; +} +InstanceBuilder& InstanceBuilder::desire_api_version(uint32_t major, uint32_t minor, uint32_t patch) { + info.desired_api_version = VKB_MAKE_VK_VERSION(0, major, minor, patch); + return *this; +} +InstanceBuilder& InstanceBuilder::enable_layer(const char* layer_name) { + if (!layer_name) return *this; + info.layers.push_back(layer_name); + return *this; +} +InstanceBuilder& InstanceBuilder::enable_extension(const char* extension_name) { + if (!extension_name) return *this; + info.extensions.push_back(extension_name); + return *this; +} +InstanceBuilder& InstanceBuilder::enable_extensions(std::vector const& extensions) { + for (const auto extension : extensions) { + info.extensions.push_back(extension); + } + return *this; +} +InstanceBuilder& InstanceBuilder::enable_extensions(size_t count, const char* const* extensions) { + if (!extensions || count == 0) return *this; + for (size_t i = 0; i < count; i++) { + info.extensions.push_back(extensions[i]); + } + return *this; +} +InstanceBuilder& InstanceBuilder::enable_validation_layers(bool enable_validation) { + info.enable_validation_layers = enable_validation; + return *this; +} +InstanceBuilder& InstanceBuilder::request_validation_layers(bool enable_validation) { + info.request_validation_layers = enable_validation; + return *this; +} + +InstanceBuilder& InstanceBuilder::use_default_debug_messenger() { + info.use_debug_messenger = true; + info.debug_callback = default_debug_callback; + return *this; +} +InstanceBuilder& InstanceBuilder::set_debug_callback(PFN_vkDebugUtilsMessengerCallbackEXT callback) { + 
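+    // Supplying a callback implies the debug messenger should be created at build time.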
info.use_debug_messenger = true; + info.debug_callback = callback; + return *this; +} +InstanceBuilder& InstanceBuilder::set_debug_callback_user_data_pointer(void* user_data_pointer) { + info.debug_user_data_pointer = user_data_pointer; + return *this; +} +InstanceBuilder& InstanceBuilder::set_headless(bool headless) { + info.headless_context = headless; + return *this; +} +InstanceBuilder& InstanceBuilder::set_debug_messenger_severity(VkDebugUtilsMessageSeverityFlagsEXT severity) { + info.debug_message_severity = severity; + return *this; +} +InstanceBuilder& InstanceBuilder::add_debug_messenger_severity(VkDebugUtilsMessageSeverityFlagsEXT severity) { + info.debug_message_severity = info.debug_message_severity | severity; + return *this; +} +InstanceBuilder& InstanceBuilder::set_debug_messenger_type(VkDebugUtilsMessageTypeFlagsEXT type) { + info.debug_message_type = type; + return *this; +} +InstanceBuilder& InstanceBuilder::add_debug_messenger_type(VkDebugUtilsMessageTypeFlagsEXT type) { + info.debug_message_type = info.debug_message_type | type; + return *this; +} +InstanceBuilder& InstanceBuilder::add_validation_disable(VkValidationCheckEXT check) { + info.disabled_validation_checks.push_back(check); + return *this; +} +InstanceBuilder& InstanceBuilder::add_validation_feature_enable(VkValidationFeatureEnableEXT enable) { + info.enabled_validation_features.push_back(enable); + return *this; +} +InstanceBuilder& InstanceBuilder::add_validation_feature_disable(VkValidationFeatureDisableEXT disable) { + info.disabled_validation_features.push_back(disable); + return *this; +} +InstanceBuilder& InstanceBuilder::set_allocation_callbacks(VkAllocationCallbacks* callbacks) { + info.allocation_callbacks = callbacks; + return *this; +} + +void destroy_debug_messenger(VkInstance const instance, VkDebugUtilsMessengerEXT const messenger); + + +// ---- Physical Device ---- // + +GFXRECON_BEGIN_NAMESPACE(detail) + +std::vector check_device_extension_support( + std::vector const& available_extensions, std::vector const& desired_extensions) { + std::vector extensions_to_enable; + for (const auto& avail_ext : available_extensions) { + for (auto& req_ext : desired_extensions) { + if (avail_ext == req_ext) { + extensions_to_enable.push_back(req_ext); + break; + } + } + } + return extensions_to_enable; +} + +// clang-format off +void combine_features(VkPhysicalDeviceFeatures& dest, VkPhysicalDeviceFeatures src){ + dest.robustBufferAccess = dest.robustBufferAccess || src.robustBufferAccess; + dest.fullDrawIndexUint32 = dest.fullDrawIndexUint32 || src.fullDrawIndexUint32; + dest.imageCubeArray = dest.imageCubeArray || src.imageCubeArray; + dest.independentBlend = dest.independentBlend || src.independentBlend; + dest.geometryShader = dest.geometryShader || src.geometryShader; + dest.tessellationShader = dest.tessellationShader || src.tessellationShader; + dest.sampleRateShading = dest.sampleRateShading || src.sampleRateShading; + dest.dualSrcBlend = dest.dualSrcBlend || src.dualSrcBlend; + dest.logicOp = dest.logicOp || src.logicOp; + dest.multiDrawIndirect = dest.multiDrawIndirect || src.multiDrawIndirect; + dest.drawIndirectFirstInstance = dest.drawIndirectFirstInstance || src.drawIndirectFirstInstance; + dest.depthClamp = dest.depthClamp || src.depthClamp; + dest.depthBiasClamp = dest.depthBiasClamp || src.depthBiasClamp; + dest.fillModeNonSolid = dest.fillModeNonSolid || src.fillModeNonSolid; + dest.depthBounds = dest.depthBounds || src.depthBounds; + dest.wideLines = dest.wideLines || src.wideLines; + 
dest.largePoints = dest.largePoints || src.largePoints; + dest.alphaToOne = dest.alphaToOne || src.alphaToOne; + dest.multiViewport = dest.multiViewport || src.multiViewport; + dest.samplerAnisotropy = dest.samplerAnisotropy || src.samplerAnisotropy; + dest.textureCompressionETC2 = dest.textureCompressionETC2 || src.textureCompressionETC2; + dest.textureCompressionASTC_LDR = dest.textureCompressionASTC_LDR || src.textureCompressionASTC_LDR; + dest.textureCompressionBC = dest.textureCompressionBC || src.textureCompressionBC; + dest.occlusionQueryPrecise = dest.occlusionQueryPrecise || src.occlusionQueryPrecise; + dest.pipelineStatisticsQuery = dest.pipelineStatisticsQuery || src.pipelineStatisticsQuery; + dest.vertexPipelineStoresAndAtomics = dest.vertexPipelineStoresAndAtomics || src.vertexPipelineStoresAndAtomics; + dest.fragmentStoresAndAtomics = dest.fragmentStoresAndAtomics || src.fragmentStoresAndAtomics; + dest.shaderTessellationAndGeometryPointSize = dest.shaderTessellationAndGeometryPointSize || src.shaderTessellationAndGeometryPointSize; + dest.shaderImageGatherExtended = dest.shaderImageGatherExtended || src.shaderImageGatherExtended; + dest.shaderStorageImageExtendedFormats = dest.shaderStorageImageExtendedFormats || src.shaderStorageImageExtendedFormats; + dest.shaderStorageImageMultisample = dest.shaderStorageImageMultisample || src.shaderStorageImageMultisample; + dest.shaderStorageImageReadWithoutFormat = dest.shaderStorageImageReadWithoutFormat || src.shaderStorageImageReadWithoutFormat; + dest.shaderStorageImageWriteWithoutFormat = dest.shaderStorageImageWriteWithoutFormat || src.shaderStorageImageWriteWithoutFormat; + dest.shaderUniformBufferArrayDynamicIndexing = dest.shaderUniformBufferArrayDynamicIndexing || src.shaderUniformBufferArrayDynamicIndexing; + dest.shaderSampledImageArrayDynamicIndexing = dest.shaderSampledImageArrayDynamicIndexing || src.shaderSampledImageArrayDynamicIndexing; + dest.shaderStorageBufferArrayDynamicIndexing = dest.shaderStorageBufferArrayDynamicIndexing || src.shaderStorageBufferArrayDynamicIndexing; + dest.shaderStorageImageArrayDynamicIndexing = dest.shaderStorageImageArrayDynamicIndexing || src.shaderStorageImageArrayDynamicIndexing; + dest.shaderClipDistance = dest.shaderClipDistance || src.shaderClipDistance; + dest.shaderCullDistance = dest.shaderCullDistance || src.shaderCullDistance; + dest.shaderFloat64 = dest.shaderFloat64 || src.shaderFloat64; + dest.shaderInt64 = dest.shaderInt64 || src.shaderInt64; + dest.shaderInt16 = dest.shaderInt16 || src.shaderInt16; + dest.shaderResourceResidency = dest.shaderResourceResidency || src.shaderResourceResidency; + dest.shaderResourceMinLod = dest.shaderResourceMinLod || src.shaderResourceMinLod; + dest.sparseBinding = dest.sparseBinding || src.sparseBinding; + dest.sparseResidencyBuffer = dest.sparseResidencyBuffer || src.sparseResidencyBuffer; + dest.sparseResidencyImage2D = dest.sparseResidencyImage2D || src.sparseResidencyImage2D; + dest.sparseResidencyImage3D = dest.sparseResidencyImage3D || src.sparseResidencyImage3D; + dest.sparseResidency2Samples = dest.sparseResidency2Samples || src.sparseResidency2Samples; + dest.sparseResidency4Samples = dest.sparseResidency4Samples || src.sparseResidency4Samples; + dest.sparseResidency8Samples = dest.sparseResidency8Samples || src.sparseResidency8Samples; + dest.sparseResidency16Samples = dest.sparseResidency16Samples || src.sparseResidency16Samples; + dest.sparseResidencyAliased = dest.sparseResidencyAliased || src.sparseResidencyAliased; + 
dest.variableMultisampleRate = dest.variableMultisampleRate || src.variableMultisampleRate; + dest.inheritedQueries = dest.inheritedQueries || src.inheritedQueries; +} + +bool supports_features(const VkPhysicalDeviceFeatures& supported, + const VkPhysicalDeviceFeatures& requested, + const GenericFeatureChain& extension_supported, + const GenericFeatureChain& extension_requested) { + + if (requested.robustBufferAccess && !supported.robustBufferAccess) return false; + if (requested.fullDrawIndexUint32 && !supported.fullDrawIndexUint32) return false; + if (requested.imageCubeArray && !supported.imageCubeArray) return false; + if (requested.independentBlend && !supported.independentBlend) return false; + if (requested.geometryShader && !supported.geometryShader) return false; + if (requested.tessellationShader && !supported.tessellationShader) return false; + if (requested.sampleRateShading && !supported.sampleRateShading) return false; + if (requested.dualSrcBlend && !supported.dualSrcBlend) return false; + if (requested.logicOp && !supported.logicOp) return false; + if (requested.multiDrawIndirect && !supported.multiDrawIndirect) return false; + if (requested.drawIndirectFirstInstance && !supported.drawIndirectFirstInstance) return false; + if (requested.depthClamp && !supported.depthClamp) return false; + if (requested.depthBiasClamp && !supported.depthBiasClamp) return false; + if (requested.fillModeNonSolid && !supported.fillModeNonSolid) return false; + if (requested.depthBounds && !supported.depthBounds) return false; + if (requested.wideLines && !supported.wideLines) return false; + if (requested.largePoints && !supported.largePoints) return false; + if (requested.alphaToOne && !supported.alphaToOne) return false; + if (requested.multiViewport && !supported.multiViewport) return false; + if (requested.samplerAnisotropy && !supported.samplerAnisotropy) return false; + if (requested.textureCompressionETC2 && !supported.textureCompressionETC2) return false; + if (requested.textureCompressionASTC_LDR && !supported.textureCompressionASTC_LDR) return false; + if (requested.textureCompressionBC && !supported.textureCompressionBC) return false; + if (requested.occlusionQueryPrecise && !supported.occlusionQueryPrecise) return false; + if (requested.pipelineStatisticsQuery && !supported.pipelineStatisticsQuery) return false; + if (requested.vertexPipelineStoresAndAtomics && !supported.vertexPipelineStoresAndAtomics) return false; + if (requested.fragmentStoresAndAtomics && !supported.fragmentStoresAndAtomics) return false; + if (requested.shaderTessellationAndGeometryPointSize && !supported.shaderTessellationAndGeometryPointSize) return false; + if (requested.shaderImageGatherExtended && !supported.shaderImageGatherExtended) return false; + if (requested.shaderStorageImageExtendedFormats && !supported.shaderStorageImageExtendedFormats) return false; + if (requested.shaderStorageImageMultisample && !supported.shaderStorageImageMultisample) return false; + if (requested.shaderStorageImageReadWithoutFormat && !supported.shaderStorageImageReadWithoutFormat) return false; + if (requested.shaderStorageImageWriteWithoutFormat && !supported.shaderStorageImageWriteWithoutFormat) return false; + if (requested.shaderUniformBufferArrayDynamicIndexing && !supported.shaderUniformBufferArrayDynamicIndexing) return false; + if (requested.shaderSampledImageArrayDynamicIndexing && !supported.shaderSampledImageArrayDynamicIndexing) return false; + if (requested.shaderStorageBufferArrayDynamicIndexing && 
!supported.shaderStorageBufferArrayDynamicIndexing) return false; + if (requested.shaderStorageImageArrayDynamicIndexing && !supported.shaderStorageImageArrayDynamicIndexing) return false; + if (requested.shaderClipDistance && !supported.shaderClipDistance) return false; + if (requested.shaderCullDistance && !supported.shaderCullDistance) return false; + if (requested.shaderFloat64 && !supported.shaderFloat64) return false; + if (requested.shaderInt64 && !supported.shaderInt64) return false; + if (requested.shaderInt16 && !supported.shaderInt16) return false; + if (requested.shaderResourceResidency && !supported.shaderResourceResidency) return false; + if (requested.shaderResourceMinLod && !supported.shaderResourceMinLod) return false; + if (requested.sparseBinding && !supported.sparseBinding) return false; + if (requested.sparseResidencyBuffer && !supported.sparseResidencyBuffer) return false; + if (requested.sparseResidencyImage2D && !supported.sparseResidencyImage2D) return false; + if (requested.sparseResidencyImage3D && !supported.sparseResidencyImage3D) return false; + if (requested.sparseResidency2Samples && !supported.sparseResidency2Samples) return false; + if (requested.sparseResidency4Samples && !supported.sparseResidency4Samples) return false; + if (requested.sparseResidency8Samples && !supported.sparseResidency8Samples) return false; + if (requested.sparseResidency16Samples && !supported.sparseResidency16Samples) return false; + if (requested.sparseResidencyAliased && !supported.sparseResidencyAliased) return false; + if (requested.variableMultisampleRate && !supported.variableMultisampleRate) return false; + if (requested.inheritedQueries && !supported.inheritedQueries) return false; + + return extension_supported.match_all(extension_requested); +} +// clang-format on +// Finds the first queue which supports the desired operations. Returns QUEUE_INDEX_MAX_VALUE if none is found +uint32_t get_first_queue_index(std::vector const& families, VkQueueFlags desired_flags) { + for (uint32_t i = 0; i < static_cast(families.size()); i++) { + if ((families[i].queueFlags & desired_flags) == desired_flags) return i; + } + return QUEUE_INDEX_MAX_VALUE; +} +// Finds the queue which is separate from the graphics queue and has the desired flag and not the +// undesired flag, but will select it if no better options are available compute support. Returns +// QUEUE_INDEX_MAX_VALUE if none is found. +uint32_t get_separate_queue_index( + std::vector const& families, VkQueueFlags desired_flags, VkQueueFlags undesired_flags) { + uint32_t index = QUEUE_INDEX_MAX_VALUE; + for (uint32_t i = 0; i < static_cast(families.size()); i++) { + if ((families[i].queueFlags & desired_flags) == desired_flags && ((families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) == 0)) { + if ((families[i].queueFlags & undesired_flags) == 0) { + return i; + } else { + index = i; + } + } + } + return index; +} + +// finds the first queue which supports only the desired flag (not graphics or transfer). Returns QUEUE_INDEX_MAX_VALUE if none is found. +uint32_t get_dedicated_queue_index( + std::vector const& families, VkQueueFlags desired_flags, VkQueueFlags undesired_flags) { + for (uint32_t i = 0; i < static_cast(families.size()); i++) { + if ((families[i].queueFlags & desired_flags) == desired_flags && + (families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) == 0 && (families[i].queueFlags & undesired_flags) == 0) + return i; + } + return QUEUE_INDEX_MAX_VALUE; +} + +// finds the first queue which supports presenting. 
returns QUEUE_INDEX_MAX_VALUE if none is found +uint32_t get_present_queue_index( + VkPhysicalDevice const phys_device, VkSurfaceKHR const surface, std::vector const& families) { + for (uint32_t i = 0; i < static_cast(families.size()); i++) { + VkBool32 presentSupport = false; + if (surface != VK_NULL_HANDLE) { + VkResult res = detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfaceSupportKHR(phys_device, i, surface, &presentSupport); + if (res != VK_SUCCESS) return QUEUE_INDEX_MAX_VALUE; // TODO: determine if this should fail another way + } + if (presentSupport == VK_TRUE) return i; + } + return QUEUE_INDEX_MAX_VALUE; +} + +GFXRECON_END_NAMESPACE(detail) + +PhysicalDevice PhysicalDeviceSelector::populate_device_details( + VkPhysicalDevice vk_phys_device, detail::GenericFeatureChain const& src_extended_features_chain) const { + PhysicalDevice physical_device{}; + physical_device.physical_device = vk_phys_device; + physical_device.surface = instance_info.surface; + physical_device.defer_surface_initialization = criteria.defer_surface_initialization; + physical_device.instance_version = instance_info.version; + auto queue_families = detail::get_vector_noerror( + detail::vulkan_functions().fp_vkGetPhysicalDeviceQueueFamilyProperties, vk_phys_device); + physical_device.queue_families = queue_families; + + detail::vulkan_functions().fp_vkGetPhysicalDeviceProperties(vk_phys_device, &physical_device.properties); + detail::vulkan_functions().fp_vkGetPhysicalDeviceFeatures(vk_phys_device, &physical_device.features); + detail::vulkan_functions().fp_vkGetPhysicalDeviceMemoryProperties(vk_phys_device, &physical_device.memory_properties); + + physical_device.name = physical_device.properties.deviceName; + + std::vector available_extensions; + auto available_extensions_ret = detail::get_vector( + available_extensions, detail::vulkan_functions().fp_vkEnumerateDeviceExtensionProperties, vk_phys_device, nullptr); + if (available_extensions_ret != VK_SUCCESS) return physical_device; + for (const auto& ext : available_extensions) { + physical_device.available_extensions.push_back(&ext.extensionName[0]); + } + + physical_device.properties2_ext_enabled = instance_info.properties2_ext_enabled; + + auto fill_chain = src_extended_features_chain; + + bool instance_is_1_1 = instance_info.version >= VKB_VK_API_VERSION_1_1; + if (!fill_chain.nodes.empty() && (instance_is_1_1 || instance_info.properties2_ext_enabled)) { + VkPhysicalDeviceFeatures2 local_features{}; + fill_chain.chain_up(local_features); + // Use KHR function if not able to use the core function + if (instance_is_1_1) { + detail::vulkan_functions().fp_vkGetPhysicalDeviceFeatures2(vk_phys_device, &local_features); + } else { + detail::vulkan_functions().fp_vkGetPhysicalDeviceFeatures2KHR(vk_phys_device, &local_features); + } + physical_device.extended_features_chain = fill_chain; + } + + return physical_device; +} + +PhysicalDevice::Suitable PhysicalDeviceSelector::is_device_suitable(PhysicalDevice const& pd) const { + PhysicalDevice::Suitable suitable = PhysicalDevice::Suitable::yes; + + if (criteria.name.size() > 0 && criteria.name != pd.properties.deviceName) return PhysicalDevice::Suitable::no; + + if (criteria.required_version > pd.properties.apiVersion) return PhysicalDevice::Suitable::no; + if (criteria.desired_version > pd.properties.apiVersion) suitable = PhysicalDevice::Suitable::partial; + + bool dedicated_compute = detail::get_dedicated_queue_index(pd.queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT) != + 
detail::QUEUE_INDEX_MAX_VALUE; + bool dedicated_transfer = detail::get_dedicated_queue_index(pd.queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT) != + detail::QUEUE_INDEX_MAX_VALUE; + bool separate_compute = detail::get_separate_queue_index(pd.queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT) != + detail::QUEUE_INDEX_MAX_VALUE; + bool separate_transfer = detail::get_separate_queue_index(pd.queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT) != + detail::QUEUE_INDEX_MAX_VALUE; + + bool present_queue = detail::get_present_queue_index(pd.physical_device, instance_info.surface, pd.queue_families) != + detail::QUEUE_INDEX_MAX_VALUE; + + if (criteria.require_dedicated_compute_queue && !dedicated_compute) return PhysicalDevice::Suitable::no; + if (criteria.require_dedicated_transfer_queue && !dedicated_transfer) return PhysicalDevice::Suitable::no; + if (criteria.require_separate_compute_queue && !separate_compute) return PhysicalDevice::Suitable::no; + if (criteria.require_separate_transfer_queue && !separate_transfer) return PhysicalDevice::Suitable::no; + if (criteria.require_present && !present_queue && !criteria.defer_surface_initialization) + return PhysicalDevice::Suitable::no; + + auto required_extensions_supported = + detail::check_device_extension_support(pd.available_extensions, criteria.required_extensions); + if (required_extensions_supported.size() != criteria.required_extensions.size()) + return PhysicalDevice::Suitable::no; + + auto desired_extensions_supported = detail::check_device_extension_support(pd.available_extensions, criteria.desired_extensions); + if (desired_extensions_supported.size() != criteria.desired_extensions.size()) + suitable = PhysicalDevice::Suitable::partial; + + if (!criteria.defer_surface_initialization && criteria.require_present) { + std::vector formats; + std::vector present_modes; + + auto formats_ret = detail::get_vector(formats, + detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfaceFormatsKHR, + pd.physical_device, + instance_info.surface); + auto present_modes_ret = detail::get_vector(present_modes, + detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfacePresentModesKHR, + pd.physical_device, + instance_info.surface); + + if (formats_ret != VK_SUCCESS || present_modes_ret != VK_SUCCESS || formats.empty() || present_modes.empty()) { + return PhysicalDevice::Suitable::no; + } + } + + if (!criteria.allow_any_type && pd.properties.deviceType != static_cast(criteria.preferred_type)) { + suitable = PhysicalDevice::Suitable::partial; + } + + bool required_features_supported = detail::supports_features( + pd.features, criteria.required_features, pd.extended_features_chain, criteria.extended_features_chain); + if (!required_features_supported) return PhysicalDevice::Suitable::no; + + for (uint32_t i = 0; i < pd.memory_properties.memoryHeapCount; i++) { + if (pd.memory_properties.memoryHeaps[i].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) { + if (pd.memory_properties.memoryHeaps[i].size < criteria.required_mem_size) { + return PhysicalDevice::Suitable::no; + } else if (pd.memory_properties.memoryHeaps[i].size < criteria.desired_mem_size) { + suitable = PhysicalDevice::Suitable::partial; + } + } + } + + return suitable; +} +// delegate construction to the one with an explicit surface parameter +PhysicalDeviceSelector::PhysicalDeviceSelector(Instance const& instance) + : PhysicalDeviceSelector(instance, VK_NULL_HANDLE) {} + +PhysicalDeviceSelector::PhysicalDeviceSelector(Instance const& instance, VkSurfaceKHR surface) { + 
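+    // Selection criteria are seeded from the instance: presentation is required unless the instance
+    // was created headless, and the instance's API version is used as both the required and desired
+    // device version until the caller overrides them.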
instance_info.instance = instance.instance; + instance_info.version = instance.instance_version; + instance_info.properties2_ext_enabled = instance.properties2_ext_enabled; + instance_info.surface = surface; + criteria.require_present = !instance.headless; + criteria.required_version = instance.api_version; + criteria.desired_version = instance.api_version; +} + +Result> PhysicalDeviceSelector::select_impl(DeviceSelectionMode selection) const { +#if !defined(NDEBUG) + // Validation + for (const auto& node : criteria.extended_features_chain.nodes) { + assert(node.sType != static_cast(0) && + "Features struct sType must be filled with the struct's " + "corresponding VkStructureType enum"); + assert(node.sType != VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 && + "Do not pass VkPhysicalDeviceFeatures2 as a required extension feature structure. An " + "instance of this is managed internally for selection criteria and device creation."); + } +#endif + + if (criteria.require_present && !criteria.defer_surface_initialization) { + if (instance_info.surface == VK_NULL_HANDLE) + return Result>{ PhysicalDeviceError::no_surface_provided }; + } + + // Get the VkPhysicalDevice handles on the system + std::vector vk_physical_devices; + + auto vk_physical_devices_ret = detail::get_vector( + vk_physical_devices, detail::vulkan_functions().fp_vkEnumeratePhysicalDevices, instance_info.instance); + if (vk_physical_devices_ret != VK_SUCCESS) { + return Result>{ PhysicalDeviceError::failed_enumerate_physical_devices, vk_physical_devices_ret }; + } + if (vk_physical_devices.size() == 0) { + return Result>{ PhysicalDeviceError::no_physical_devices_found }; + } + + auto fill_out_phys_dev_with_criteria = [&](PhysicalDevice& phys_dev) { + phys_dev.features = criteria.required_features; + phys_dev.extended_features_chain = criteria.extended_features_chain; + bool portability_ext_available = false; + for (const auto& ext : phys_dev.available_extensions) + if (criteria.enable_portability_subset && ext == "VK_KHR_portability_subset") + portability_ext_available = true; + + auto desired_extensions_supported = + detail::check_device_extension_support(phys_dev.available_extensions, criteria.desired_extensions); + + phys_dev.extensions_to_enable.clear(); + phys_dev.extensions_to_enable.insert( + phys_dev.extensions_to_enable.end(), criteria.required_extensions.begin(), criteria.required_extensions.end()); + phys_dev.extensions_to_enable.insert( + phys_dev.extensions_to_enable.end(), desired_extensions_supported.begin(), desired_extensions_supported.end()); + if (portability_ext_available) { + phys_dev.extensions_to_enable.push_back("VK_KHR_portability_subset"); + } + }; + + // if this option is set, always return only the first physical device found + if (criteria.use_first_gpu_unconditionally && vk_physical_devices.size() > 0) { + PhysicalDevice physical_device = populate_device_details(vk_physical_devices[0], criteria.extended_features_chain); + fill_out_phys_dev_with_criteria(physical_device); + return std::vector{ physical_device }; + } + + // Populate their details and check their suitability + std::vector physical_devices; + for (auto& vk_physical_device : vk_physical_devices) { + PhysicalDevice phys_dev = populate_device_details(vk_physical_device, criteria.extended_features_chain); + phys_dev.suitable = is_device_suitable(phys_dev); + if (phys_dev.suitable != PhysicalDevice::Suitable::no) { + physical_devices.push_back(phys_dev); + } + } + + // sort the list into fully and partially suitable devices. 
use stable_partition to maintain relative order + const auto partition_index = std::stable_partition(physical_devices.begin(), physical_devices.end(), [](auto const& pd) { + return pd.suitable == PhysicalDevice::Suitable::yes; + }); + + // Remove the partially suitable elements if they aren't desired + if (selection == DeviceSelectionMode::only_fully_suitable) { + physical_devices.erase(partition_index, physical_devices.end()); + } + + // Make the physical device ready to be used to create a Device from it + for (auto& physical_device : physical_devices) { + fill_out_phys_dev_with_criteria(physical_device); + } + + return physical_devices; +} + +Result PhysicalDeviceSelector::select(DeviceSelectionMode selection) const { + auto const selected_devices = select_impl(selection); + + if (!selected_devices) return Result{ selected_devices.error() }; + if (selected_devices.value().size() == 0) { + return Result{ PhysicalDeviceError::no_suitable_device }; + } + + return selected_devices.value().at(0); +} + +// Return all devices which are considered suitable - intended for applications which want to let the user pick the physical device +Result> PhysicalDeviceSelector::select_devices(DeviceSelectionMode selection) const { + auto const selected_devices = select_impl(selection); + if (!selected_devices) return Result>{ selected_devices.error() }; + if (selected_devices.value().size() == 0) { + return Result>{ PhysicalDeviceError::no_suitable_device }; + } + return selected_devices.value(); +} + +Result> PhysicalDeviceSelector::select_device_names(DeviceSelectionMode selection) const { + auto const selected_devices = select_impl(selection); + if (!selected_devices) return Result>{ selected_devices.error() }; + if (selected_devices.value().size() == 0) { + return Result>{ PhysicalDeviceError::no_suitable_device }; + } + std::vector names; + for (const auto& pd : selected_devices.value()) { + names.push_back(pd.name); + } + return names; +} +PhysicalDeviceSelector& PhysicalDeviceSelector::set_surface(VkSurfaceKHR surface) { + instance_info.surface = surface; + return *this; +} +PhysicalDeviceSelector& PhysicalDeviceSelector::set_name(std::string const& name) { + criteria.name = name; + return *this; +} +PhysicalDeviceSelector& PhysicalDeviceSelector::prefer_gpu_device_type(PreferredDeviceType type) { + criteria.preferred_type = type; + return *this; +} +PhysicalDeviceSelector& PhysicalDeviceSelector::allow_any_gpu_device_type(bool allow_any_type) { + criteria.allow_any_type = allow_any_type; + return *this; +} +PhysicalDeviceSelector& PhysicalDeviceSelector::require_present(bool require) { + criteria.require_present = require; + return *this; +} +PhysicalDeviceSelector& PhysicalDeviceSelector::require_dedicated_transfer_queue() { + criteria.require_dedicated_transfer_queue = true; + return *this; +} +PhysicalDeviceSelector& PhysicalDeviceSelector::require_dedicated_compute_queue() { + criteria.require_dedicated_compute_queue = true; + return *this; +} +PhysicalDeviceSelector& PhysicalDeviceSelector::require_separate_transfer_queue() { + criteria.require_separate_transfer_queue = true; + return *this; +} +PhysicalDeviceSelector& PhysicalDeviceSelector::require_separate_compute_queue() { + criteria.require_separate_compute_queue = true; + return *this; +} +PhysicalDeviceSelector& PhysicalDeviceSelector::required_device_memory_size(VkDeviceSize size) { + criteria.required_mem_size = size; + return *this; +} +PhysicalDeviceSelector& PhysicalDeviceSelector::desired_device_memory_size(VkDeviceSize size) { + 
criteria.desired_mem_size = size; + return *this; +} +PhysicalDeviceSelector& PhysicalDeviceSelector::add_required_extension(const char* extension) { + criteria.required_extensions.push_back(extension); + return *this; +} +PhysicalDeviceSelector& PhysicalDeviceSelector::add_required_extensions(std::vector const& extensions) { + for (const auto& ext : extensions) { + criteria.required_extensions.push_back(ext); + } + return *this; +} +PhysicalDeviceSelector& PhysicalDeviceSelector::add_required_extensions(size_t count, const char* const* extensions) { + if (!extensions || count == 0) return *this; + for (size_t i = 0; i < count; i++) { + criteria.required_extensions.push_back(extensions[i]); + } + return *this; +} +PhysicalDeviceSelector& PhysicalDeviceSelector::add_desired_extension(const char* extension) { + criteria.desired_extensions.push_back(extension); + return *this; +} +PhysicalDeviceSelector& PhysicalDeviceSelector::add_desired_extensions(const std::vector& extensions) { + for (const auto& ext : extensions) { + criteria.desired_extensions.push_back(ext); + } + return *this; +} +PhysicalDeviceSelector& PhysicalDeviceSelector::set_minimum_version(uint32_t major, uint32_t minor) { + criteria.required_version = VKB_MAKE_VK_VERSION(0, major, minor, 0); + return *this; +} +PhysicalDeviceSelector& PhysicalDeviceSelector::set_desired_version(uint32_t major, uint32_t minor) { + criteria.desired_version = VKB_MAKE_VK_VERSION(0, major, minor, 0); + return *this; +} +PhysicalDeviceSelector& PhysicalDeviceSelector::disable_portability_subset() { + criteria.enable_portability_subset = false; + return *this; +} + +PhysicalDeviceSelector& PhysicalDeviceSelector::set_required_features(VkPhysicalDeviceFeatures const& features) { + detail::combine_features(criteria.required_features, features); + return *this; +} +#if defined(VKB_VK_API_VERSION_1_2) +// The implementation of the set_required_features_1X functions sets the sType manually. This was a poor choice since +// users of Vulkan should expect to fill out their structs properly. To make the functions take the struct parameter by +// const reference, a local copy must be made in order to set the sType. 
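As an illustrative aside (not part of the ported code), a caller would fill only the feature booleans it cares about and let these helpers patch in the sType; the `selector` object below is a hypothetical PhysicalDeviceSelector:

    VkPhysicalDeviceVulkan12Features wanted_12{};   // sType intentionally left unset here
    wanted_12.bufferDeviceAddress = VK_TRUE;
    wanted_12.descriptorIndexing  = VK_TRUE;
    selector.set_required_features_12(wanted_12);   // copies the struct and sets the correct sType internally
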
+PhysicalDeviceSelector& PhysicalDeviceSelector::set_required_features_11(VkPhysicalDeviceVulkan11Features const& features_11) { + VkPhysicalDeviceVulkan11Features features_11_copy = features_11; + features_11_copy.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES; + add_required_extension_features(features_11_copy); + return *this; +} +PhysicalDeviceSelector& PhysicalDeviceSelector::set_required_features_12(VkPhysicalDeviceVulkan12Features const& features_12) { + VkPhysicalDeviceVulkan12Features features_12_copy = features_12; + features_12_copy.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES; + add_required_extension_features(features_12_copy); + return *this; +} +#endif +#if defined(VKB_VK_API_VERSION_1_3) +PhysicalDeviceSelector& PhysicalDeviceSelector::set_required_features_13(VkPhysicalDeviceVulkan13Features const& features_13) { + VkPhysicalDeviceVulkan13Features features_13_copy = features_13; + features_13_copy.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES; + add_required_extension_features(features_13_copy); + return *this; +} +#endif +PhysicalDeviceSelector& PhysicalDeviceSelector::defer_surface_initialization() { + criteria.defer_surface_initialization = true; + return *this; +} +PhysicalDeviceSelector& PhysicalDeviceSelector::select_first_device_unconditionally(bool unconditionally) { + criteria.use_first_gpu_unconditionally = unconditionally; + return *this; +} + +// PhysicalDevice +bool PhysicalDevice::has_dedicated_compute_queue() const { + return detail::get_dedicated_queue_index(queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT) != detail::QUEUE_INDEX_MAX_VALUE; +} +bool PhysicalDevice::has_separate_compute_queue() const { + return detail::get_separate_queue_index(queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT) != detail::QUEUE_INDEX_MAX_VALUE; +} +bool PhysicalDevice::has_dedicated_transfer_queue() const { + return detail::get_dedicated_queue_index(queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT) != detail::QUEUE_INDEX_MAX_VALUE; +} +bool PhysicalDevice::has_separate_transfer_queue() const { + return detail::get_separate_queue_index(queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT) != detail::QUEUE_INDEX_MAX_VALUE; +} +std::vector PhysicalDevice::get_queue_families() const { return queue_families; } +std::vector PhysicalDevice::get_extensions() const { return extensions_to_enable; } +std::vector PhysicalDevice::get_available_extensions() const { return available_extensions; } +bool PhysicalDevice::is_extension_present(const char* ext) const { + return std::find_if(std::begin(available_extensions), std::end(available_extensions), [ext](std::string const& ext_name) { + return ext_name == ext; + }) != std::end(available_extensions); +} +bool PhysicalDevice::enable_extension_if_present(const char* extension) { + auto it = std::find_if(std::begin(available_extensions), + std::end(available_extensions), + [extension](std::string const& ext_name) { return ext_name == extension; }); + if (it != std::end(available_extensions)) { + extensions_to_enable.push_back(extension); + return true; + } + return false; +} +bool PhysicalDevice::enable_extensions_if_present(const std::vector& extensions) { + for (const auto extension : extensions) { + auto it = std::find_if(std::begin(available_extensions), + std::end(available_extensions), + [extension](std::string const& ext_name) { return ext_name == extension; }); + if (it == std::end(available_extensions)) return false; + } + for (const auto extension 
: extensions) + extensions_to_enable.push_back(extension); + return true; +} + +bool PhysicalDevice::enable_features_if_present(const VkPhysicalDeviceFeatures& features_to_enable) { + VkPhysicalDeviceFeatures actual_pdf{}; + detail::vulkan_functions().fp_vkGetPhysicalDeviceFeatures(physical_device, &actual_pdf); + + bool required_features_supported = detail::supports_features(actual_pdf, features_to_enable, {}, {}); + if (required_features_supported) { + detail::combine_features(features, features_to_enable); + } + return required_features_supported; +} + +bool PhysicalDevice::is_features_node_present(detail::GenericFeaturesPNextNode const& node) const { + detail::GenericFeatureChain requested_features; + requested_features.nodes.push_back(node); + + return extended_features_chain.find_and_match(requested_features); +} + +bool PhysicalDevice::enable_features_node_if_present(detail::GenericFeaturesPNextNode const& node) { + VkPhysicalDeviceFeatures2 actual_pdf2{}; + + detail::GenericFeatureChain requested_features; + requested_features.nodes.push_back(node); + + detail::GenericFeatureChain fill_chain = requested_features; + // Zero out supported features + memset(fill_chain.nodes.front().fields, UINT8_MAX, sizeof(VkBool32) * detail::GenericFeaturesPNextNode::field_capacity); + + actual_pdf2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2; + fill_chain.chain_up(actual_pdf2); + + bool required_features_supported = false; + bool instance_is_1_1 = instance_version >= VKB_VK_API_VERSION_1_1; + if (instance_is_1_1 || properties2_ext_enabled) { + if (instance_is_1_1) { + detail::vulkan_functions().fp_vkGetPhysicalDeviceFeatures2(physical_device, &actual_pdf2); + } else { + detail::vulkan_functions().fp_vkGetPhysicalDeviceFeatures2KHR(physical_device, &actual_pdf2); + } + required_features_supported = fill_chain.match_all(requested_features); + if (required_features_supported) { + extended_features_chain.combine(requested_features); + } + } + return required_features_supported; +} + + +PhysicalDevice::operator VkPhysicalDevice() const { return this->physical_device; } + +// ---- Queues ---- // + +Result Device::get_queue_index(QueueType type) const { + uint32_t index = detail::QUEUE_INDEX_MAX_VALUE; + switch (type) { + case QueueType::present: + index = detail::get_present_queue_index(physical_device.physical_device, surface, queue_families); + if (index == detail::QUEUE_INDEX_MAX_VALUE) return Result{ QueueError::present_unavailable }; + break; + case QueueType::graphics: + index = detail::get_first_queue_index(queue_families, VK_QUEUE_GRAPHICS_BIT); + if (index == detail::QUEUE_INDEX_MAX_VALUE) return Result{ QueueError::graphics_unavailable }; + break; + case QueueType::compute: + index = detail::get_separate_queue_index(queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT); + if (index == detail::QUEUE_INDEX_MAX_VALUE) return Result{ QueueError::compute_unavailable }; + break; + case QueueType::transfer: + index = detail::get_separate_queue_index(queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT); + if (index == detail::QUEUE_INDEX_MAX_VALUE) return Result{ QueueError::transfer_unavailable }; + break; + default: + return Result{ QueueError::invalid_queue_family_index }; + } + return index; +} +Result Device::get_dedicated_queue_index(QueueType type) const { + uint32_t index = detail::QUEUE_INDEX_MAX_VALUE; + switch (type) { + case QueueType::compute: + index = detail::get_dedicated_queue_index(queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT); + if (index == 
detail::QUEUE_INDEX_MAX_VALUE) return Result{ QueueError::compute_unavailable }; + break; + case QueueType::transfer: + index = detail::get_dedicated_queue_index(queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT); + if (index == detail::QUEUE_INDEX_MAX_VALUE) return Result{ QueueError::transfer_unavailable }; + break; + default: + return Result{ QueueError::invalid_queue_family_index }; + } + return index; +} + +Result Device::get_queue(QueueType type) const { + auto index = get_queue_index(type); + if (!index.has_value()) return { index.error() }; + VkQueue out_queue; + internal_table.fp_vkGetDeviceQueue(device, index.value(), 0, &out_queue); + return out_queue; +} +Result Device::get_dedicated_queue(QueueType type) const { + auto index = get_dedicated_queue_index(type); + if (!index.has_value()) return { index.error() }; + VkQueue out_queue; + internal_table.fp_vkGetDeviceQueue(device, index.value(), 0, &out_queue); + return out_queue; +} + +// ---- Dispatch ---- // + +DispatchTable Device::make_table() const { return { device, fp_vkGetDeviceProcAddr }; } + +// ---- Device ---- // + +Device::operator VkDevice() const { return this->device; } + +CustomQueueDescription::CustomQueueDescription(uint32_t index, std::vector priorities) + : index(index), priorities(std::move(priorities)) {} + +void destroy_device(Device const& device) { + device.internal_table.fp_vkDestroyDevice(device.device, device.allocation_callbacks); +} + +DeviceBuilder::DeviceBuilder(PhysicalDevice phys_device) { physical_device = std::move(phys_device); } + +Result DeviceBuilder::build() const { + + std::vector queue_descriptions; + queue_descriptions.insert(queue_descriptions.end(), info.queue_descriptions.begin(), info.queue_descriptions.end()); + + if (queue_descriptions.empty()) { + for (uint32_t i = 0; i < physical_device.queue_families.size(); i++) { + queue_descriptions.emplace_back(i, std::vector{ 1.0f }); + } + } + + std::vector queueCreateInfos; + for (auto& desc : queue_descriptions) { + VkDeviceQueueCreateInfo queue_create_info = {}; + queue_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; + queue_create_info.queueFamilyIndex = desc.index; + queue_create_info.queueCount = static_cast(desc.priorities.size()); + queue_create_info.pQueuePriorities = desc.priorities.data(); + queueCreateInfos.push_back(queue_create_info); + } + + std::vector extensions_to_enable; + for (const auto& ext : physical_device.extensions_to_enable) { + extensions_to_enable.push_back(ext.c_str()); + } + if (physical_device.surface != VK_NULL_HANDLE || physical_device.defer_surface_initialization) + extensions_to_enable.push_back({ VK_KHR_SWAPCHAIN_EXTENSION_NAME }); + + std::vector final_pnext_chain; + VkDeviceCreateInfo device_create_info = {}; + + bool user_defined_phys_dev_features_2 = false; + for (auto& pnext : info.pNext_chain) { + if (pnext->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2) { + user_defined_phys_dev_features_2 = true; + break; + } + } + + if (user_defined_phys_dev_features_2 && !physical_device.extended_features_chain.nodes.empty()) { + return { DeviceError::VkPhysicalDeviceFeatures2_in_pNext_chain_while_using_add_required_extension_features }; + } + + // These objects must be alive during the call to vkCreateDevice + auto physical_device_extension_features_copy = physical_device.extended_features_chain; + VkPhysicalDeviceFeatures2 local_features2{}; + local_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2; + local_features2.features = physical_device.features; + + if 
(!user_defined_phys_dev_features_2) { + if (physical_device.instance_version >= VKB_VK_API_VERSION_1_1 || physical_device.properties2_ext_enabled) { + final_pnext_chain.push_back(reinterpret_cast(&local_features2)); + for (auto& features_node : physical_device_extension_features_copy.nodes) { + final_pnext_chain.push_back(reinterpret_cast(&features_node)); + } + } else { + // Only set device_create_info.pEnabledFeatures when the pNext chain does not contain a VkPhysicalDeviceFeatures2 structure + device_create_info.pEnabledFeatures = &physical_device.features; + } + } + + for (auto& pnext : info.pNext_chain) { + final_pnext_chain.push_back(pnext); + } + + detail::setup_pNext_chain(device_create_info, final_pnext_chain); +#if !defined(NDEBUG) + for (auto& node : final_pnext_chain) { + assert(node->sType != VK_STRUCTURE_TYPE_APPLICATION_INFO); + } +#endif + device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO; + device_create_info.flags = info.flags; + device_create_info.queueCreateInfoCount = static_cast(queueCreateInfos.size()); + device_create_info.pQueueCreateInfos = queueCreateInfos.data(); + device_create_info.enabledExtensionCount = static_cast(extensions_to_enable.size()); + device_create_info.ppEnabledExtensionNames = extensions_to_enable.data(); + + Device device; + + VkResult res = detail::vulkan_functions().fp_vkCreateDevice( + physical_device.physical_device, &device_create_info, info.allocation_callbacks, &device.device); + if (res != VK_SUCCESS) { + return { DeviceError::failed_create_device, res }; + } + + device.physical_device = physical_device; + device.surface = physical_device.surface; + device.queue_families = physical_device.queue_families; + device.allocation_callbacks = info.allocation_callbacks; + device.fp_vkGetDeviceProcAddr = detail::vulkan_functions().fp_vkGetDeviceProcAddr; + detail::vulkan_functions().get_device_proc_addr(device.device, device.internal_table.fp_vkGetDeviceQueue, "vkGetDeviceQueue"); + detail::vulkan_functions().get_device_proc_addr(device.device, device.internal_table.fp_vkDestroyDevice, "vkDestroyDevice"); + device.instance_version = physical_device.instance_version; + return device; +} +DeviceBuilder& DeviceBuilder::custom_queue_setup(std::vector queue_descriptions) { + info.queue_descriptions = std::move(queue_descriptions); + return *this; +} +DeviceBuilder& DeviceBuilder::set_allocation_callbacks(VkAllocationCallbacks* callbacks) { + info.allocation_callbacks = callbacks; + return *this; +} + +// ---- Swapchain ---- // + +GFXRECON_BEGIN_NAMESPACE(detail) + +struct SurfaceSupportDetails { + VkSurfaceCapabilitiesKHR capabilities; + std::vector formats; + std::vector present_modes; +}; + +enum class SurfaceSupportError { + surface_handle_null, + failed_get_surface_capabilities, + failed_enumerate_surface_formats, + failed_enumerate_present_modes, + no_suitable_desired_format +}; + +struct SurfaceSupportErrorCategory : std::error_category { + const char* name() const noexcept override { return "vbk_surface_support"; } + std::string message(int err) const override { + switch (static_cast(err)) { + CASE_TO_STRING(SurfaceSupportError, surface_handle_null) + CASE_TO_STRING(SurfaceSupportError, failed_get_surface_capabilities) + CASE_TO_STRING(SurfaceSupportError, failed_enumerate_surface_formats) + CASE_TO_STRING(SurfaceSupportError, failed_enumerate_present_modes) + CASE_TO_STRING(SurfaceSupportError, no_suitable_desired_format) + default: + return ""; + } + } +}; +const SurfaceSupportErrorCategory surface_support_error_category; + 
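For context, a hedged sketch of how these error codes surface to a caller (illustrative only; `swapchain_builder` is a hypothetical SwapchainBuilder instance, not code from this patch):

    auto swapchain_ret = swapchain_builder.build();
    if (!swapchain_ret) {
        // error() yields a std::error_code whose message() comes from a category like the one above,
        // and vk_result() carries the optional VkResult recorded alongside it.
        printf("swapchain creation failed: %s (VkResult %d)\n",
               swapchain_ret.error().message().c_str(),
               static_cast<int>(swapchain_ret.vk_result()));
    }
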
+std::error_code make_error_code(SurfaceSupportError surface_support_error) { + return { static_cast(surface_support_error), detail::surface_support_error_category }; +} + +Result query_surface_support_details(VkPhysicalDevice phys_device, VkSurfaceKHR surface) { + if (surface == VK_NULL_HANDLE) return make_error_code(SurfaceSupportError::surface_handle_null); + + VkSurfaceCapabilitiesKHR capabilities; + VkResult res = detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR(phys_device, surface, &capabilities); + if (res != VK_SUCCESS) { + return { make_error_code(SurfaceSupportError::failed_get_surface_capabilities), res }; + } + + std::vector formats; + std::vector present_modes; + + auto formats_ret = detail::get_vector( + formats, detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfaceFormatsKHR, phys_device, surface); + if (formats_ret != VK_SUCCESS) + return { make_error_code(SurfaceSupportError::failed_enumerate_surface_formats), formats_ret }; + auto present_modes_ret = detail::get_vector( + present_modes, detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfacePresentModesKHR, phys_device, surface); + if (present_modes_ret != VK_SUCCESS) + return { make_error_code(SurfaceSupportError::failed_enumerate_present_modes), present_modes_ret }; + + return SurfaceSupportDetails{ capabilities, formats, present_modes }; +} + +Result find_desired_surface_format( + std::vector const& available_formats, std::vector const& desired_formats) { + for (auto const& desired_format : desired_formats) { + for (auto const& available_format : available_formats) { + // finds the first format that is desired and available + if (desired_format.format == available_format.format && desired_format.colorSpace == available_format.colorSpace) { + return desired_format; + } + } + } + + // if no desired format is available, we report that no format is suitable to the user request + return { make_error_code(SurfaceSupportError::no_suitable_desired_format) }; +} + +VkSurfaceFormatKHR find_best_surface_format( + std::vector const& available_formats, std::vector const& desired_formats) { + auto surface_format_ret = detail::find_desired_surface_format(available_formats, desired_formats); + if (surface_format_ret.has_value()) return surface_format_ret.value(); + + // use the first available format as a fallback if any desired formats aren't found + return available_formats[0]; +} + +VkPresentModeKHR find_present_mode(std::vector const& available_resent_modes, + std::vector const& desired_present_modes) { + for (auto const& desired_pm : desired_present_modes) { + for (auto const& available_pm : available_resent_modes) { + // finds the first present mode that is desired and available + if (desired_pm == available_pm) return desired_pm; + } + } + // only present mode required, use as a fallback + return VK_PRESENT_MODE_FIFO_KHR; +} + +template T minimum(T a, T b) { return a < b ? a : b; } +template T maximum(T a, T b) { return a > b ? 
a : b; }
+
+VkExtent2D find_extent(VkSurfaceCapabilitiesKHR const& capabilities, uint32_t desired_width, uint32_t desired_height) {
+    if (capabilities.currentExtent.width != UINT32_MAX) {
+        return capabilities.currentExtent;
+    } else {
+        VkExtent2D actualExtent = { desired_width, desired_height };
+
+        actualExtent.width =
+            maximum(capabilities.minImageExtent.width, minimum(capabilities.maxImageExtent.width, actualExtent.width));
+        actualExtent.height =
+            maximum(capabilities.minImageExtent.height, minimum(capabilities.maxImageExtent.height, actualExtent.height));
+
+        return actualExtent;
+    }
+}
+
+GFXRECON_END_NAMESPACE(detail)
+
+void destroy_swapchain(Swapchain const& swapchain) {
+    if (swapchain.device != VK_NULL_HANDLE && swapchain.swapchain != VK_NULL_HANDLE) {
+        swapchain.internal_table.fp_vkDestroySwapchainKHR(swapchain.device, swapchain.swapchain, swapchain.allocation_callbacks);
+    }
+}
+
+SwapchainBuilder::SwapchainBuilder(Device const& device) {
+    info.physical_device = device.physical_device.physical_device;
+    info.device = device.device;
+    info.surface = device.surface;
+    info.instance_version = device.instance_version;
+    auto present = device.get_queue_index(QueueType::present);
+    auto graphics = device.get_queue_index(QueueType::graphics);
+    assert(graphics.has_value() && present.has_value() && "Graphics and Present queue indexes must be valid");
+    info.graphics_queue_index = graphics.value();
+    info.present_queue_index = present.value();
+    info.allocation_callbacks = device.allocation_callbacks;
+}
+SwapchainBuilder::SwapchainBuilder(Device const& device, VkSurfaceKHR const surface) {
+    info.physical_device = device.physical_device.physical_device;
+    info.device = device.device;
+    info.surface = surface;
+    info.instance_version = device.instance_version;
+    Device temp_device = device;
+    temp_device.surface = surface;
+    auto present = temp_device.get_queue_index(QueueType::present);
+    auto graphics = temp_device.get_queue_index(QueueType::graphics);
+    assert(graphics.has_value() && present.has_value() && "Graphics and Present queue indexes must be valid");
+    info.graphics_queue_index = graphics.value();
+    info.present_queue_index = present.value();
+    info.allocation_callbacks = device.allocation_callbacks;
+}
+SwapchainBuilder::SwapchainBuilder(VkPhysicalDevice const physical_device,
+    VkDevice const device,
+    VkSurfaceKHR const surface,
+    uint32_t graphics_queue_index,
+    uint32_t present_queue_index) {
+    info.physical_device = physical_device;
+    info.device = device;
+    info.surface = surface;
+    info.graphics_queue_index = graphics_queue_index;
+    info.present_queue_index = present_queue_index;
+    if (graphics_queue_index == detail::QUEUE_INDEX_MAX_VALUE || present_queue_index == detail::QUEUE_INDEX_MAX_VALUE) {
+        auto queue_families = detail::get_vector_noerror<VkQueueFamilyProperties>(
+            detail::vulkan_functions().fp_vkGetPhysicalDeviceQueueFamilyProperties, physical_device);
+        if (graphics_queue_index == detail::QUEUE_INDEX_MAX_VALUE)
+            info.graphics_queue_index = detail::get_first_queue_index(queue_families, VK_QUEUE_GRAPHICS_BIT);
+        if (present_queue_index == detail::QUEUE_INDEX_MAX_VALUE)
+            info.present_queue_index = detail::get_present_queue_index(physical_device, surface, queue_families);
+    }
+}
+Result<Swapchain> SwapchainBuilder::build() const {
+    if (info.surface == VK_NULL_HANDLE) {
+        return Error{ SwapchainError::surface_handle_not_provided };
+    }
+
+    auto desired_formats = info.desired_formats;
+    if (desired_formats.size() == 0) add_desired_formats(desired_formats);
+    auto desired_present_modes =
info.desired_present_modes; + if (desired_present_modes.size() == 0) add_desired_present_modes(desired_present_modes); + + auto surface_support_ret = detail::query_surface_support_details(info.physical_device, info.surface); + if (!surface_support_ret.has_value()) + return Error{ SwapchainError::failed_query_surface_support_details, surface_support_ret.vk_result() }; + auto surface_support = surface_support_ret.value(); + + uint32_t image_count = info.min_image_count; + if (info.required_min_image_count >= 1) { + if (info.required_min_image_count < surface_support.capabilities.minImageCount) + return make_error_code(SwapchainError::required_min_image_count_too_low); + + image_count = info.required_min_image_count; + } else if (info.min_image_count == 0) { + // We intentionally use minImageCount + 1 to maintain existing behavior, even if it typically results in triple buffering on most systems. + image_count = surface_support.capabilities.minImageCount + 1; + } else { + image_count = info.min_image_count; + if (image_count < surface_support.capabilities.minImageCount) + image_count = surface_support.capabilities.minImageCount; + } + if (surface_support.capabilities.maxImageCount > 0 && image_count > surface_support.capabilities.maxImageCount) { + image_count = surface_support.capabilities.maxImageCount; + } + + VkSurfaceFormatKHR surface_format = detail::find_best_surface_format(surface_support.formats, desired_formats); + + VkExtent2D extent = detail::find_extent(surface_support.capabilities, info.desired_width, info.desired_height); + + uint32_t image_array_layers = info.array_layer_count; + if (surface_support.capabilities.maxImageArrayLayers < info.array_layer_count) + image_array_layers = surface_support.capabilities.maxImageArrayLayers; + if (info.array_layer_count == 0) image_array_layers = 1; + + uint32_t queue_family_indices[] = { info.graphics_queue_index, info.present_queue_index }; + + + VkPresentModeKHR present_mode = detail::find_present_mode(surface_support.present_modes, desired_present_modes); + + // VkSurfaceCapabilitiesKHR::supportedUsageFlags is only only valid for some present modes. For shared present modes, we should also check VkSharedPresentSurfaceCapabilitiesKHR::sharedPresentSupportedUsageFlags. 
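+    // The check below therefore only applies to the four core present modes; validating image usage
+    // for shared present modes would require querying VkSharedPresentSurfaceCapabilitiesKHR, which is
+    // not done here.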
+ auto is_unextended_present_mode = [](VkPresentModeKHR present_mode) { + return (present_mode == VK_PRESENT_MODE_IMMEDIATE_KHR) || (present_mode == VK_PRESENT_MODE_MAILBOX_KHR) || + (present_mode == VK_PRESENT_MODE_FIFO_KHR) || (present_mode == VK_PRESENT_MODE_FIFO_RELAXED_KHR); + }; + + if (is_unextended_present_mode(present_mode) && + (info.image_usage_flags & surface_support.capabilities.supportedUsageFlags) != info.image_usage_flags) { + return Error{ SwapchainError::required_usage_not_supported }; + } + + VkSurfaceTransformFlagBitsKHR pre_transform = info.pre_transform; + if (info.pre_transform == static_cast(0)) + pre_transform = surface_support.capabilities.currentTransform; + + VkSwapchainCreateInfoKHR swapchain_create_info = {}; + swapchain_create_info.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR; + detail::setup_pNext_chain(swapchain_create_info, info.pNext_chain); +#if !defined(NDEBUG) + for (auto& node : info.pNext_chain) { + assert(node->sType != VK_STRUCTURE_TYPE_APPLICATION_INFO); + } +#endif + swapchain_create_info.flags = info.create_flags; + swapchain_create_info.surface = info.surface; + swapchain_create_info.minImageCount = image_count; + swapchain_create_info.imageFormat = surface_format.format; + swapchain_create_info.imageColorSpace = surface_format.colorSpace; + swapchain_create_info.imageExtent = extent; + swapchain_create_info.imageArrayLayers = image_array_layers; + swapchain_create_info.imageUsage = info.image_usage_flags; + + if (info.graphics_queue_index != info.present_queue_index) { + swapchain_create_info.imageSharingMode = VK_SHARING_MODE_CONCURRENT; + swapchain_create_info.queueFamilyIndexCount = 2; + swapchain_create_info.pQueueFamilyIndices = queue_family_indices; + } else { + swapchain_create_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE; + } + + swapchain_create_info.preTransform = pre_transform; + swapchain_create_info.compositeAlpha = info.composite_alpha; + swapchain_create_info.presentMode = present_mode; + swapchain_create_info.clipped = info.clipped; + swapchain_create_info.oldSwapchain = info.old_swapchain; + Swapchain swapchain{}; + PFN_vkCreateSwapchainKHR swapchain_create_proc; + detail::vulkan_functions().get_device_proc_addr(info.device, swapchain_create_proc, "vkCreateSwapchainKHR"); + auto res = swapchain_create_proc(info.device, &swapchain_create_info, info.allocation_callbacks, &swapchain.swapchain); + + if (res != VK_SUCCESS) { + return Error{ SwapchainError::failed_create_swapchain, res }; + } + swapchain.device = info.device; + swapchain.image_format = surface_format.format; + swapchain.color_space = surface_format.colorSpace; + swapchain.image_usage_flags = info.image_usage_flags; + swapchain.extent = extent; + detail::vulkan_functions().get_device_proc_addr( + info.device, swapchain.internal_table.fp_vkGetSwapchainImagesKHR, "vkGetSwapchainImagesKHR"); + detail::vulkan_functions().get_device_proc_addr(info.device, swapchain.internal_table.fp_vkCreateImageView, "vkCreateImageView"); + detail::vulkan_functions().get_device_proc_addr(info.device, swapchain.internal_table.fp_vkDestroyImageView, "vkDestroyImageView"); + detail::vulkan_functions().get_device_proc_addr( + info.device, swapchain.internal_table.fp_vkDestroySwapchainKHR, "vkDestroySwapchainKHR"); + auto images = swapchain.get_images(); + if (!images) { + return Error{ SwapchainError::failed_get_swapchain_images }; + } + swapchain.requested_min_image_count = image_count; + swapchain.present_mode = present_mode; + swapchain.image_count = 
static_cast(images.value().size()); + swapchain.instance_version = info.instance_version; + swapchain.allocation_callbacks = info.allocation_callbacks; + return swapchain; +} +Result> Swapchain::get_images() { + std::vector swapchain_images; + + auto swapchain_images_ret = + detail::get_vector(swapchain_images, internal_table.fp_vkGetSwapchainImagesKHR, device, swapchain); + if (swapchain_images_ret != VK_SUCCESS) { + return Error{ SwapchainError::failed_get_swapchain_images, swapchain_images_ret }; + } + return swapchain_images; +} +Result> Swapchain::get_image_views() { return get_image_views(nullptr); } +Result> Swapchain::get_image_views(const void* pNext) { + const auto swapchain_images_ret = get_images(); + if (!swapchain_images_ret) return swapchain_images_ret.error(); + const auto& swapchain_images = swapchain_images_ret.value(); + + bool already_contains_image_view_usage = false; + while (pNext) { + if (reinterpret_cast(pNext)->sType == VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO) { + already_contains_image_view_usage = true; + break; + } + pNext = reinterpret_cast(pNext)->pNext; + } + VkImageViewUsageCreateInfo desired_flags{}; + desired_flags.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO; + desired_flags.pNext = pNext; + desired_flags.usage = image_usage_flags; + + std::vector views(swapchain_images.size()); + for (size_t i = 0; i < swapchain_images.size(); i++) { + VkImageViewCreateInfo createInfo = {}; + createInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; + if (instance_version >= VKB_VK_API_VERSION_1_1 && !already_contains_image_view_usage) { + createInfo.pNext = &desired_flags; + } else { + createInfo.pNext = pNext; + } + + createInfo.image = swapchain_images[i]; + createInfo.viewType = VK_IMAGE_VIEW_TYPE_2D; + createInfo.format = image_format; + createInfo.components.r = VK_COMPONENT_SWIZZLE_IDENTITY; + createInfo.components.g = VK_COMPONENT_SWIZZLE_IDENTITY; + createInfo.components.b = VK_COMPONENT_SWIZZLE_IDENTITY; + createInfo.components.a = VK_COMPONENT_SWIZZLE_IDENTITY; + createInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + createInfo.subresourceRange.baseMipLevel = 0; + createInfo.subresourceRange.levelCount = 1; + createInfo.subresourceRange.baseArrayLayer = 0; + createInfo.subresourceRange.layerCount = 1; + VkResult res = internal_table.fp_vkCreateImageView(device, &createInfo, allocation_callbacks, &views[i]); + if (res != VK_SUCCESS) return Error{ SwapchainError::failed_create_swapchain_image_views, res }; + } + return views; +} +void Swapchain::destroy_image_views(std::vector const& image_views) { + for (auto& image_view : image_views) { + internal_table.fp_vkDestroyImageView(device, image_view, allocation_callbacks); + } +} +Swapchain::operator VkSwapchainKHR() const { return this->swapchain; } +SwapchainBuilder& SwapchainBuilder::set_old_swapchain(VkSwapchainKHR old_swapchain) { + info.old_swapchain = old_swapchain; + return *this; +} +SwapchainBuilder& SwapchainBuilder::set_old_swapchain(Swapchain const& swapchain) { + info.old_swapchain = swapchain.swapchain; + return *this; +} +SwapchainBuilder& SwapchainBuilder::set_desired_extent(uint32_t width, uint32_t height) { + info.desired_width = width; + info.desired_height = height; + return *this; +} +SwapchainBuilder& SwapchainBuilder::set_desired_format(VkSurfaceFormatKHR format) { + info.desired_formats.insert(info.desired_formats.begin(), format); + return *this; +} +SwapchainBuilder& SwapchainBuilder::add_fallback_format(VkSurfaceFormatKHR format) { + 
info.desired_formats.push_back(format); + return *this; +} +SwapchainBuilder& SwapchainBuilder::use_default_format_selection() { + info.desired_formats.clear(); + add_desired_formats(info.desired_formats); + return *this; +} + +SwapchainBuilder& SwapchainBuilder::set_desired_present_mode(VkPresentModeKHR present_mode) { + info.desired_present_modes.insert(info.desired_present_modes.begin(), present_mode); + return *this; +} +SwapchainBuilder& SwapchainBuilder::add_fallback_present_mode(VkPresentModeKHR present_mode) { + info.desired_present_modes.push_back(present_mode); + return *this; +} +SwapchainBuilder& SwapchainBuilder::use_default_present_mode_selection() { + info.desired_present_modes.clear(); + add_desired_present_modes(info.desired_present_modes); + return *this; +} +SwapchainBuilder& SwapchainBuilder::set_allocation_callbacks(VkAllocationCallbacks* callbacks) { + info.allocation_callbacks = callbacks; + return *this; +} +SwapchainBuilder& SwapchainBuilder::set_image_usage_flags(VkImageUsageFlags usage_flags) { + info.image_usage_flags = usage_flags; + return *this; +} +SwapchainBuilder& SwapchainBuilder::add_image_usage_flags(VkImageUsageFlags usage_flags) { + info.image_usage_flags = info.image_usage_flags | usage_flags; + return *this; +} +SwapchainBuilder& SwapchainBuilder::use_default_image_usage_flags() { + info.image_usage_flags = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; + return *this; +} +SwapchainBuilder& SwapchainBuilder::set_image_array_layer_count(uint32_t array_layer_count) { + info.array_layer_count = array_layer_count; + return *this; +} +SwapchainBuilder& SwapchainBuilder::set_desired_min_image_count(uint32_t min_image_count) { + info.min_image_count = min_image_count; + return *this; +} +SwapchainBuilder& SwapchainBuilder::set_required_min_image_count(uint32_t required_min_image_count) { + info.required_min_image_count = required_min_image_count; + return *this; +} +SwapchainBuilder& SwapchainBuilder::set_clipped(bool clipped) { + info.clipped = clipped; + return *this; +} +SwapchainBuilder& SwapchainBuilder::set_create_flags(VkSwapchainCreateFlagBitsKHR create_flags) { + info.create_flags = create_flags; + return *this; +} +SwapchainBuilder& SwapchainBuilder::set_pre_transform_flags(VkSurfaceTransformFlagBitsKHR pre_transform_flags) { + info.pre_transform = pre_transform_flags; + return *this; +} +SwapchainBuilder& SwapchainBuilder::set_composite_alpha_flags(VkCompositeAlphaFlagBitsKHR composite_alpha_flags) { + info.composite_alpha = composite_alpha_flags; + return *this; +} + +void SwapchainBuilder::add_desired_formats(std::vector& formats) const { + formats.push_back({ VK_FORMAT_B8G8R8A8_SRGB, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR }); + formats.push_back({ VK_FORMAT_R8G8B8A8_SRGB, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR }); +} +void SwapchainBuilder::add_desired_present_modes(std::vector& modes) const { + modes.push_back(VK_PRESENT_MODE_MAILBOX_KHR); + modes.push_back(VK_PRESENT_MODE_FIFO_KHR); +} + +GFXRECON_END_NAMESPACE(test) + +GFXRECON_END_NAMESPACE(gfxrecon) diff --git a/test/test_apps/common/test_app_base.h b/test/test_apps/common/test_app_base.h new file mode 100644 index 0000000000..802a74a7de --- /dev/null +++ b/test/test_apps/common/test_app_base.h @@ -0,0 +1,1026 @@ +/* +** Copyright (c) 2018-2023 Valve Corporation +** Copyright (c) 2018-2024 LunarG, Inc. 
+** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and associated documentation files (the "Software"), +** to deal in the Software without restriction, including without limitation +** the rights to use, copy, modify, merge, publish, distribute, sublicense, +** and/or sell copies of the Software, and to permit persons to whom the +** Software is furnished to do so, subject to the following conditions: +** +** The above copyright notice and this permission notice shall be included in +** all copies or substantial portions of the Software. +** +** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +** IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +** AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +** FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +** DEALINGS IN THE SOFTWARE. +*/ + +#ifndef GFXRECON_TEST_APP_BASE_H +#define GFXRECON_TEST_APP_BASE_H + +#include +#include +#include + +#include +#include +#include + +#include + +#include "test_app_dispatch.h" +#include "util/defines.h" + +#ifdef VK_MAKE_API_VERSION +#define VKB_MAKE_VK_VERSION(variant, major, minor, patch) VK_MAKE_API_VERSION(variant, major, minor, patch) +#elif defined(VK_MAKE_VERSION) +#define VKB_MAKE_VK_VERSION(variant, major, minor, patch) VK_MAKE_VERSION(major, minor, patch) +#endif + +#if defined(VK_API_VERSION_1_3) || defined(VK_VERSION_1_3) +#define VKB_VK_API_VERSION_1_3 VKB_MAKE_VK_VERSION(0, 1, 3, 0) +#endif + +#if defined(VK_API_VERSION_1_2) || defined(VK_VERSION_1_2) +#define VKB_VK_API_VERSION_1_2 VKB_MAKE_VK_VERSION(0, 1, 2, 0) +#endif + +#if defined(VK_API_VERSION_1_1) || defined(VK_VERSION_1_1) +#define VKB_VK_API_VERSION_1_1 VKB_MAKE_VK_VERSION(0, 1, 1, 0) +#endif + +#if defined(VK_API_VERSION_1_0) || defined(VK_VERSION_1_0) +#define VKB_VK_API_VERSION_1_0 VKB_MAKE_VK_VERSION(0, 1, 0, 0) +#endif + +GFXRECON_BEGIN_NAMESPACE(gfxrecon) + +GFXRECON_BEGIN_NAMESPACE(test) + +struct Error { + std::error_code type; + VkResult vk_result = VK_SUCCESS; // optional error value if a vulkan call failed +}; + +template class Result { + public: + Result(const T& value) noexcept : m_value{ value }, m_init{ true } {} + Result(T&& value) noexcept : m_value{ std::move(value) }, m_init{ true } {} + + Result(Error error) noexcept : m_error{ error }, m_init{ false } {} + + Result(std::error_code error_code, VkResult result = VK_SUCCESS) noexcept + : m_error{ error_code, result }, m_init{ false } {} + + ~Result() noexcept { destroy(); } + Result(Result const& expected) noexcept : m_init(expected.m_init) { + if (m_init) + new (&m_value) T{ expected.m_value }; + else + m_error = expected.m_error; + } + Result& operator=(Result const& result) noexcept { + m_init = result.m_init; + if (m_init) + new (&m_value) T{ result.m_value }; + else + m_error = result.m_error; + return *this; + } + Result(Result&& expected) noexcept : m_init(expected.m_init) { + if (m_init) + new (&m_value) T{ std::move(expected.m_value) }; + else + m_error = std::move(expected.m_error); + expected.destroy(); + } + Result& operator=(Result&& result) noexcept { + m_init = result.m_init; + if (m_init) + new (&m_value) T{ std::move(result.m_value) }; + else + m_error = std::move(result.m_error); + return *this; + } + Result& operator=(const T& expect) noexcept { + 
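+        // Destroy any currently held value before copy-constructing the new one into the union.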
destroy(); + m_init = true; + new (&m_value) T{ expect }; + return *this; + } + Result& operator=(T&& expect) noexcept { + destroy(); + m_init = true; + new (&m_value) T{ std::move(expect) }; + return *this; + } + Result& operator=(const Error& error) noexcept { + destroy(); + m_init = false; + m_error = error; + return *this; + } + Result& operator=(Error&& error) noexcept { + destroy(); + m_init = false; + m_error = error; + return *this; + } + // clang-format off + const T* operator-> () const noexcept { assert (m_init); return &m_value; } + T* operator-> () noexcept { assert (m_init); return &m_value; } + const T& operator* () const& noexcept { assert (m_init); return m_value; } + T& operator* () & noexcept { assert (m_init); return m_value; } + T operator* () && noexcept { assert (m_init); return std::move (m_value); } + const T& value () const& noexcept { assert (m_init); return m_value; } + T& value () & noexcept { assert (m_init); return m_value; } + T value () && noexcept { assert (m_init); return std::move (m_value); } + + // std::error_code associated with the error + std::error_code error() const { assert (!m_init); return m_error.type; } + // optional VkResult that could of been produced due to the error + VkResult vk_result() const { assert (!m_init); return m_error.vk_result; } + // Returns the struct that holds the std::error_code and VkResult + Error full_error() const { assert (!m_init); return m_error; } + // clang-format on + + // check if the result has an error that matches a specific error case + template bool matches_error(E error_enum_value) const { + return !m_init && static_cast(m_error.type.value()) == error_enum_value; + } + + bool has_value() const { return m_init; } + explicit operator bool() const { return m_init; } + + private: + void destroy() { + if (m_init) m_value.~T(); + } + union { + T m_value; + Error m_error; + }; + bool m_init; +}; + +GFXRECON_BEGIN_NAMESPACE(detail) +struct GenericFeaturesPNextNode { + + static const uint32_t field_capacity = 256; + + GenericFeaturesPNextNode(); + + template GenericFeaturesPNextNode(T const& features) noexcept { + memset(fields, UINT8_MAX, sizeof(VkBool32) * field_capacity); + memcpy(this, &features, sizeof(T)); + } + + static bool match(GenericFeaturesPNextNode const& requested, GenericFeaturesPNextNode const& supported) noexcept; + + void combine(GenericFeaturesPNextNode const& right) noexcept; + + VkStructureType sType = static_cast(0); + void* pNext = nullptr; + VkBool32 fields[field_capacity]; +}; + +struct GenericFeatureChain { + std::vector nodes; + + template void add(T const& features) noexcept { + // If this struct is already in the list, combine it + for (auto& node : nodes) { + if (static_cast(features.sType) == node.sType) { + node.combine(features); + return; + } + } + // Otherwise append to the end + nodes.push_back(features); + } + + bool match_all(GenericFeatureChain const& extension_requested) const noexcept; + bool find_and_match(GenericFeatureChain const& extension_requested) const noexcept; + + void chain_up(VkPhysicalDeviceFeatures2& feats2) noexcept; + + void combine(GenericFeatureChain const& right) noexcept; +}; + +GFXRECON_END_NAMESPACE(detail) + +enum class InstanceError { + vulkan_unavailable, + vulkan_version_unavailable, + vulkan_version_1_1_unavailable, + vulkan_version_1_2_unavailable, + failed_create_instance, + failed_create_debug_messenger, + requested_layers_not_present, + requested_extensions_not_present, + windowing_extensions_not_present, +}; +enum class PhysicalDeviceError { 
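+    // Reported by PhysicalDeviceSelector::select(), select_devices() and select_device_names().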
+ no_surface_provided, + failed_enumerate_physical_devices, + no_physical_devices_found, + no_suitable_device, +}; +enum class QueueError { + present_unavailable, + graphics_unavailable, + compute_unavailable, + transfer_unavailable, + queue_index_out_of_range, + invalid_queue_family_index +}; +enum class DeviceError { + failed_create_device, + VkPhysicalDeviceFeatures2_in_pNext_chain_while_using_add_required_extension_features, +}; +enum class SwapchainError { + surface_handle_not_provided, + failed_query_surface_support_details, + failed_create_swapchain, + failed_get_swapchain_images, + failed_create_swapchain_image_views, + required_min_image_count_too_low, + required_usage_not_supported +}; + +std::error_code make_error_code(InstanceError instance_error); +std::error_code make_error_code(PhysicalDeviceError physical_device_error); +std::error_code make_error_code(QueueError queue_error); +std::error_code make_error_code(DeviceError device_error); +std::error_code make_error_code(SwapchainError swapchain_error); + +const char* to_string_message_severity(VkDebugUtilsMessageSeverityFlagBitsEXT s); +const char* to_string_message_type(VkDebugUtilsMessageTypeFlagsEXT s); + +const char* to_string(InstanceError err); +const char* to_string(PhysicalDeviceError err); +const char* to_string(QueueError err); +const char* to_string(DeviceError err); +const char* to_string(SwapchainError err); + +// Gathers useful information about the available vulkan capabilities, like layers and instance +// extensions. Use this for enabling features conditionally, ie if you would like an extension but +// can use a fallback if it isn't supported but need to know if support is available first. +struct SystemInfo { + private: + SystemInfo(); + + public: + // Use get_system_info to create a SystemInfo struct. This is because loading vulkan could fail. 
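+    // Both overloads return Result<SystemInfo>; the second lets the caller supply an already loaded
+    // vkGetInstanceProcAddr instead of having the library load Vulkan itself.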
+ static Result get_system_info(); + static Result get_system_info(PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr); + + // Returns true if a layer is available + bool is_layer_available(const char* layer_name) const; + // Returns true if an extension is available + bool is_extension_available(const char* extension_name) const; + + std::vector available_layers; + std::vector available_extensions; + bool validation_layers_available = false; + bool debug_utils_available = false; +}; + +// Forward declared - check VkBoostrap.cpp for implementations +const char* to_string_message_severity(VkDebugUtilsMessageSeverityFlagBitsEXT s); +const char* to_string_message_type(VkDebugUtilsMessageTypeFlagsEXT s); + +// Default debug messenger +// Feel free to copy-paste it into your own code, change it as needed, then call `set_debug_callback()` to use that instead +inline VKAPI_ATTR VkBool32 VKAPI_CALL default_debug_callback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageType, + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, + void*) { + auto ms = to_string_message_severity(messageSeverity); + auto mt = to_string_message_type(messageType); + printf("[%s: %s]\n%s\n", ms, mt, pCallbackData->pMessage); + + return VK_FALSE; // Applications must return false here +} + +class InstanceBuilder; +class PhysicalDeviceSelector; + +struct Instance { + VkInstance instance = VK_NULL_HANDLE; + VkDebugUtilsMessengerEXT debug_messenger = VK_NULL_HANDLE; + VkAllocationCallbacks* allocation_callbacks = VK_NULL_HANDLE; + PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr = nullptr; + PFN_vkGetDeviceProcAddr fp_vkGetDeviceProcAddr = nullptr; + + // A conversion function which allows this Instance to be used + // in places where VkInstance would have been used. + operator VkInstance() const; + + // Return a loaded instance dispatch table + InstanceDispatchTable make_table() const; + + private: + bool headless = false; + bool properties2_ext_enabled = false; + uint32_t instance_version = VKB_VK_API_VERSION_1_0; + uint32_t api_version = VKB_VK_API_VERSION_1_0; + + friend class InstanceBuilder; + friend class PhysicalDeviceSelector; +}; + +void destroy_surface(Instance const& instance, VkSurfaceKHR surface); // release surface handle +void destroy_surface(VkInstance instance, VkSurfaceKHR surface, VkAllocationCallbacks* callbacks = nullptr); // release surface handle +void destroy_instance(Instance const& instance); // release instance resources + +/* If headless mode is false, by default vk-bootstrap use the following logic to enable the windowing extensions + +#if defined(_WIN32) + VK_KHR_win32_surface +#elif defined(__linux__) + VK_KHR_xcb_surface + VK_KHR_xlib_surface + VK_KHR_wayland_surface +#elif defined(__APPLE__) + VK_EXT_metal_surface +#elif defined(__ANDROID__) + VK_KHR_android_surface +#elif defined(_DIRECT2DISPLAY) + VK_KHR_display +#endif + +Use `InstanceBuilder::enable_extension()` to add new extensions without altering the default behavior +Feel free to make a PR or raise an issue to include additional platforms. +*/ + +class InstanceBuilder { + public: + // Default constructor, will load vulkan. + explicit InstanceBuilder(); + // Optional: Can use your own PFN_vkGetInstanceProcAddr + explicit InstanceBuilder(PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr); + + // Create a VkInstance. Return an error if it failed. + Result build() const; + + // Sets the name of the application. Defaults to "" if none is provided. 
+ InstanceBuilder& set_app_name(const char* app_name); + // Sets the name of the engine. Defaults to "" if none is provided. + InstanceBuilder& set_engine_name(const char* engine_name); + + // Sets the version of the application. + // Should be constructed with VK_MAKE_VERSION or VK_MAKE_API_VERSION. + InstanceBuilder& set_app_version(uint32_t app_version); + // Sets the (major, minor, patch) version of the application. + InstanceBuilder& set_app_version(uint32_t major, uint32_t minor, uint32_t patch = 0); + + // Sets the version of the engine. + // Should be constructed with VK_MAKE_VERSION or VK_MAKE_API_VERSION. + InstanceBuilder& set_engine_version(uint32_t engine_version); + // Sets the (major, minor, patch) version of the engine. + InstanceBuilder& set_engine_version(uint32_t major, uint32_t minor, uint32_t patch = 0); + + // Require a vulkan API version. Will fail to create if this version isn't available. + // Should be constructed with VK_MAKE_VERSION or VK_MAKE_API_VERSION. + InstanceBuilder& require_api_version(uint32_t required_api_version); + // Require a vulkan API version. Will fail to create if this version isn't available. + InstanceBuilder& require_api_version(uint32_t major, uint32_t minor, uint32_t patch = 0); + + // Overrides required API version for instance creation. Will fail to create if this version isn't available. + // Should be constructed with VK_MAKE_VERSION or VK_MAKE_API_VERSION. + InstanceBuilder& set_minimum_instance_version(uint32_t minimum_instance_version); + // Overrides required API version for instance creation. Will fail to create if this version isn't available. + InstanceBuilder& set_minimum_instance_version(uint32_t major, uint32_t minor, uint32_t patch = 0); + + // Prefer a vulkan instance API version. If the desired version isn't available, it will use the + // highest version available. Should be constructed with VK_MAKE_VERSION or VK_MAKE_API_VERSION. + [[deprecated("Use require_api_version + set_minimum_instance_version instead.")]] InstanceBuilder& + desire_api_version(uint32_t preferred_vulkan_version); + + // Prefer a vulkan instance API version. If the desired version isn't available, it will use the highest version available. + [[deprecated("Use require_api_version + set_minimum_instance_version instead.")]] InstanceBuilder& + desire_api_version(uint32_t major, uint32_t minor, uint32_t patch = 0); + + // Adds a layer to be enabled. Will fail to create an instance if the layer isn't available. + InstanceBuilder& enable_layer(const char* layer_name); + // Adds an extension to be enabled. Will fail to create an instance if the extension isn't available. + InstanceBuilder& enable_extension(const char* extension_name); + InstanceBuilder& enable_extensions(std::vector const& extensions); + InstanceBuilder& enable_extensions(size_t count, const char* const* extensions); + + // Headless Mode does not load the required extensions for presentation. Defaults to true. + InstanceBuilder& set_headless(bool headless = true); + + // Enables the validation layers. Will fail to create an instance if the validation layers aren't available. + InstanceBuilder& enable_validation_layers(bool require_validation = true); + // Checks if the validation layers are available and loads them if they are. + InstanceBuilder& request_validation_layers(bool enable_validation = true); + + // Use a default debug callback that prints to standard out. + InstanceBuilder& use_default_debug_messenger(); + // Provide a user defined debug callback. 
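+    // For reference, a typical builder chain might look like the following sketch (names such as
+    // `instance_ret` are illustrative; default_debug_callback is the function defined above):
+    //
+    //     gfxrecon::test::InstanceBuilder builder;
+    //     auto instance_ret = builder.set_app_name("example app")
+    //                             .request_validation_layers()
+    //                             .set_debug_callback(gfxrecon::test::default_debug_callback)
+    //                             .require_api_version(1, 1, 0)
+    //                             .build();
+    //     if (!instance_ret) { /* handle the error */ }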
+ InstanceBuilder& set_debug_callback(PFN_vkDebugUtilsMessengerCallbackEXT callback); + // Sets the void* to use in the debug messenger - only useful with a custom callback + InstanceBuilder& set_debug_callback_user_data_pointer(void* user_data_pointer); + // Set what message severity is needed to trigger the callback. + InstanceBuilder& set_debug_messenger_severity(VkDebugUtilsMessageSeverityFlagsEXT severity); + // Add a message severity to the list that triggers the callback. + InstanceBuilder& add_debug_messenger_severity(VkDebugUtilsMessageSeverityFlagsEXT severity); + // Set what message type triggers the callback. + InstanceBuilder& set_debug_messenger_type(VkDebugUtilsMessageTypeFlagsEXT type); + // Add a message type to the list of that triggers the callback. + InstanceBuilder& add_debug_messenger_type(VkDebugUtilsMessageTypeFlagsEXT type); + + // Disable some validation checks. + // Checks: All, and Shaders + InstanceBuilder& add_validation_disable(VkValidationCheckEXT check); + + // Enables optional parts of the validation layers. + // Parts: best practices, gpu assisted, and gpu assisted reserve binding slot. + InstanceBuilder& add_validation_feature_enable(VkValidationFeatureEnableEXT enable); + + // Disables sections of the validation layers. + // Options: All, shaders, thread safety, api parameters, object lifetimes, core checks, and unique handles. + InstanceBuilder& add_validation_feature_disable(VkValidationFeatureDisableEXT disable); + + // Provide custom allocation callbacks. + InstanceBuilder& set_allocation_callbacks(VkAllocationCallbacks* callbacks); + + private: + struct InstanceInfo { + // VkApplicationInfo + const char* app_name = nullptr; + const char* engine_name = nullptr; + uint32_t application_version = 0; + uint32_t engine_version = 0; + uint32_t minimum_instance_version = 0; + uint32_t required_api_version = VKB_VK_API_VERSION_1_0; + uint32_t desired_api_version = VKB_VK_API_VERSION_1_0; + + // VkInstanceCreateInfo + std::vector layers; + std::vector extensions; + VkInstanceCreateFlags flags = static_cast(0); + std::vector pNext_elements; + + // debug callback - use the default so it is not nullptr + PFN_vkDebugUtilsMessengerCallbackEXT debug_callback = default_debug_callback; + VkDebugUtilsMessageSeverityFlagsEXT debug_message_severity = + VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT; + VkDebugUtilsMessageTypeFlagsEXT debug_message_type = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | + VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | + VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT; + void* debug_user_data_pointer = nullptr; + + // validation features + std::vector disabled_validation_checks; + std::vector enabled_validation_features; + std::vector disabled_validation_features; + + // Custom allocator + VkAllocationCallbacks* allocation_callbacks = VK_NULL_HANDLE; + + bool request_validation_layers = false; + bool enable_validation_layers = false; + bool use_debug_messenger = false; + bool headless_context = false; + + PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr = nullptr; + } info; +}; + +VKAPI_ATTR VkBool32 VKAPI_CALL default_debug_callback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageType, + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, + void* pUserData); + +void destroy_debug_utils_messenger( + VkInstance const instance, VkDebugUtilsMessengerEXT const messenger, VkAllocationCallbacks* allocation_callbacks = nullptr); + +// ---- 
Physical Device ---- //
+class PhysicalDeviceSelector;
+class DeviceBuilder;
+
+struct PhysicalDevice {
+    std::string name;
+    VkPhysicalDevice physical_device = VK_NULL_HANDLE;
+    VkSurfaceKHR surface = VK_NULL_HANDLE;
+
+    // Note that this reflects selected features carried over from required features, not all features the physical device supports.
+    VkPhysicalDeviceFeatures features{};
+    VkPhysicalDeviceProperties properties{};
+    VkPhysicalDeviceMemoryProperties memory_properties{};
+
+    // Has a queue family that supports compute operations but not graphics nor transfer.
+    bool has_dedicated_compute_queue() const;
+    // Has a queue family that supports transfer operations but not graphics nor compute.
+    bool has_dedicated_transfer_queue() const;
+
+    // Has a queue family that supports compute operations but not graphics.
+    bool has_separate_compute_queue() const;
+    // Has a queue family that supports transfer operations but not graphics.
+    bool has_separate_transfer_queue() const;
+
+    // Advanced: Get the VkQueueFamilyProperties of the device if special queue setup is needed
+    std::vector<VkQueueFamilyProperties> get_queue_families() const;
+
+    // Query the list of extensions which should be enabled
+    std::vector<std::string> get_extensions() const;
+
+    // Query the list of extensions which the physical device supports
+    std::vector<std::string> get_available_extensions() const;
+
+    // Returns true if an extension should be enabled on the device
+    bool is_extension_present(const char* extension) const;
+
+    // Returns true if all the features are present
+    template <typename T> bool are_extension_features_present(T const& features) const {
+        return is_features_node_present(detail::GenericFeaturesPNextNode(features));
+    }
+
+    // If the given extension is present, make the extension be enabled on the device.
+    // Returns true if the extension is present.
+    bool enable_extension_if_present(const char* extension);
+
+    // If all the given extensions are present, make all the extensions be enabled on the device.
+    // Returns true if all the extensions are present.
+    bool enable_extensions_if_present(const std::vector<const char*>& extensions);
+
+    // If the features from VkPhysicalDeviceFeatures are all present, make all of the features be enabled on the device.
+    // Returns true if all the features are present.
+    bool enable_features_if_present(const VkPhysicalDeviceFeatures& features_to_enable);
+
+    // If the features from the provided features struct are all present, make all of the features be enabled on the
+    // device. Returns true if all of the features are present.
+    template <typename T> bool enable_extension_features_if_present(T const& features_check) {
+        return enable_features_node_if_present(detail::GenericFeaturesPNextNode(features_check));
+    }
+
+    // A conversion function which allows this PhysicalDevice to be used
+    // in places where VkPhysicalDevice would have been used.
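+    // For example, a selected PhysicalDevice can be passed directly to calls such as
+    // vkGetPhysicalDeviceProperties(physical_device, &properties) without an explicit cast.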
+    operator VkPhysicalDevice() const;
+
+    private:
+    uint32_t instance_version = VKB_VK_API_VERSION_1_0;
+    std::vector<std::string> extensions_to_enable;
+    std::vector<std::string> available_extensions;
+    std::vector<VkQueueFamilyProperties> queue_families;
+    detail::GenericFeatureChain extended_features_chain;
+
+    bool defer_surface_initialization = false;
+    bool properties2_ext_enabled = false;
+    enum class Suitable { yes, partial, no };
+    Suitable suitable = Suitable::yes;
+    friend class PhysicalDeviceSelector;
+    friend class DeviceBuilder;
+
+    bool is_features_node_present(detail::GenericFeaturesPNextNode const& node) const;
+    bool enable_features_node_if_present(detail::GenericFeaturesPNextNode const& node);
+};
+
+enum class PreferredDeviceType { other = 0, integrated = 1, discrete = 2, virtual_gpu = 3, cpu = 4 };
+
+enum class DeviceSelectionMode {
+    // return all suitable and partially suitable devices
+    partially_and_fully_suitable,
+    // return only physical devices which are fully suitable
+    only_fully_suitable
+};
+
+// Enumerates the physical devices on the system, and based on the added criteria, returns a physical device or list of physical devices.
+// A device is considered suitable if it meets all the 'required' and 'desired' criteria.
+// A device is considered partially suitable if it meets only the 'required' criteria.
+class PhysicalDeviceSelector {
+    public:
+    // Requires a gfxrecon::test::Instance to construct, needed to pass instance creation info.
+    explicit PhysicalDeviceSelector(Instance const& instance);
+    // Requires a gfxrecon::test::Instance to construct, needed to pass instance creation info, optionally specify the surface here
+    explicit PhysicalDeviceSelector(Instance const& instance, VkSurfaceKHR surface);
+
+    // Return the first device which is suitable.
+    // Use the `selection` parameter to configure whether partially suitable devices are acceptable.
+    Result<PhysicalDevice> select(DeviceSelectionMode selection = DeviceSelectionMode::partially_and_fully_suitable) const;
+
+    // Return all devices which are considered suitable - intended for applications which want to let the user pick the physical device
+    Result<std::vector<PhysicalDevice>> select_devices(
+        DeviceSelectionMode selection = DeviceSelectionMode::partially_and_fully_suitable) const;
+
+    // Return the names of all devices which are considered suitable - intended for applications which want to let the user pick the physical device
+    Result<std::vector<std::string>> select_device_names(
+        DeviceSelectionMode selection = DeviceSelectionMode::partially_and_fully_suitable) const;
+
+    // Set the surface the physical device should render to.
+    // Be sure to set it if swapchain functionality is to be used.
+    PhysicalDeviceSelector& set_surface(VkSurfaceKHR surface);
+
+    // Set the name of the device to select.
+    PhysicalDeviceSelector& set_name(std::string const& name);
+    // Set the desired physical device type to select. Defaults to PreferredDeviceType::discrete.
+    PhysicalDeviceSelector& prefer_gpu_device_type(PreferredDeviceType type = PreferredDeviceType::discrete);
+    // Allow selection of a gpu device type that isn't the preferred physical device type. Defaults to true.
+    PhysicalDeviceSelector& allow_any_gpu_device_type(bool allow_any_type = true);
+
+    // Require that a physical device supports presentation. Defaults to true.
+    PhysicalDeviceSelector& require_present(bool require = true);
+
+    // Require a queue family that supports compute operations but not graphics nor transfer.
+    PhysicalDeviceSelector& require_dedicated_compute_queue();
+    // Require a queue family that supports transfer operations but not graphics nor compute.
+ PhysicalDeviceSelector& require_dedicated_transfer_queue(); + + // Require a queue family that supports compute operations but not graphics. + PhysicalDeviceSelector& require_separate_compute_queue(); + // Require a queue family that supports transfer operations but not graphics. + PhysicalDeviceSelector& require_separate_transfer_queue(); + + // Require a memory heap from VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT with `size` memory available. + PhysicalDeviceSelector& required_device_memory_size(VkDeviceSize size); + // Prefer a memory heap from VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT with `size` memory available. + [[deprecated]] PhysicalDeviceSelector& desired_device_memory_size(VkDeviceSize size); + + // Require a physical device which supports a specific extension. + PhysicalDeviceSelector& add_required_extension(const char* extension); + // Require a physical device which supports a set of extensions. + PhysicalDeviceSelector& add_required_extensions(std::vector const& extensions); + PhysicalDeviceSelector& add_required_extensions(size_t count, const char* const* extensions); + + // Prefer a physical device which supports a specific extension. + [[deprecated("Use gfxrecon::test::PhysicalDevice::enable_extension_if_present instead")]] PhysicalDeviceSelector& + add_desired_extension(const char* extension); + // Prefer a physical device which supports a set of extensions. + [[deprecated("Use gfxrecon::test::PhysicalDevice::enable_extensions_if_present instead")]] PhysicalDeviceSelector& + add_desired_extensions(const std::vector& extensions); + + // Prefer a physical device that supports a (major, minor) version of vulkan. + [[deprecated("Use set_minimum_version + InstanceBuilder::require_api_version.")]] PhysicalDeviceSelector& + set_desired_version(uint32_t major, uint32_t minor); + + // Require a physical device that supports a (major, minor) version of vulkan. + PhysicalDeviceSelector& set_minimum_version(uint32_t major, uint32_t minor); + + // By default PhysicalDeviceSelector enables the portability subset if available + // This function disables that behavior + PhysicalDeviceSelector& disable_portability_subset(); + + // Require a physical device which supports a specific set of general/extension features. + // If this function is used, the user should not put their own VkPhysicalDeviceFeatures2 in + // the pNext chain of VkDeviceCreateInfo. + template PhysicalDeviceSelector& add_required_extension_features(T const& features) { + criteria.extended_features_chain.add(features); + return *this; + } + + // Require a physical device which supports the features in VkPhysicalDeviceFeatures. + PhysicalDeviceSelector& set_required_features(VkPhysicalDeviceFeatures const& features); +#if defined(VKB_VK_API_VERSION_1_2) + // Require a physical device which supports the features in VkPhysicalDeviceVulkan11Features. + // Must have vulkan version 1.2 - This is due to the VkPhysicalDeviceVulkan11Features struct being added in 1.2, not 1.1 + PhysicalDeviceSelector& set_required_features_11(VkPhysicalDeviceVulkan11Features const& features_11); + // Require a physical device which supports the features in VkPhysicalDeviceVulkan12Features. + // Must have vulkan version 1.2 + PhysicalDeviceSelector& set_required_features_12(VkPhysicalDeviceVulkan12Features const& features_12); +#endif +#if defined(VKB_VK_API_VERSION_1_3) + // Require a physical device which supports the features in VkPhysicalDeviceVulkan13Features. 
+ // Must have vulkan version 1.3 + PhysicalDeviceSelector& set_required_features_13(VkPhysicalDeviceVulkan13Features const& features_13); +#endif + + // Used when surface creation happens after physical device selection. + // Warning: This disables checking if the physical device supports a given surface. + PhysicalDeviceSelector& defer_surface_initialization(); + + // Ignore all criteria and choose the first physical device that is available. + // Only use when: The first gpu in the list may be set by global user preferences and an application may wish to respect it. + PhysicalDeviceSelector& select_first_device_unconditionally(bool unconditionally = true); + + private: + struct InstanceInfo { + VkInstance instance = VK_NULL_HANDLE; + VkSurfaceKHR surface = VK_NULL_HANDLE; + uint32_t version = VKB_VK_API_VERSION_1_0; + bool headless = false; + bool properties2_ext_enabled = false; + } instance_info; + + // We copy the extension features stored in the selector criteria under the prose of a + // "template" to ensure that after fetching everything is compared 1:1 during a match. + + struct SelectionCriteria { + std::string name; + PreferredDeviceType preferred_type = PreferredDeviceType::discrete; + bool allow_any_type = true; + bool require_present = true; + bool require_dedicated_transfer_queue = false; + bool require_dedicated_compute_queue = false; + bool require_separate_transfer_queue = false; + bool require_separate_compute_queue = false; + VkDeviceSize required_mem_size = 0; + VkDeviceSize desired_mem_size = 0; + + std::vector required_extensions; + std::vector desired_extensions; + + uint32_t required_version = VKB_VK_API_VERSION_1_0; + uint32_t desired_version = VKB_VK_API_VERSION_1_0; + + VkPhysicalDeviceFeatures required_features{}; + VkPhysicalDeviceFeatures2 required_features2{}; + + detail::GenericFeatureChain extended_features_chain; + bool defer_surface_initialization = false; + bool use_first_gpu_unconditionally = false; + bool enable_portability_subset = true; + } criteria; + + PhysicalDevice populate_device_details( + VkPhysicalDevice phys_device, detail::GenericFeatureChain const& src_extended_features_chain) const; + + PhysicalDevice::Suitable is_device_suitable(PhysicalDevice const& phys_device) const; + + Result> select_impl(DeviceSelectionMode selection) const; +}; + +// ---- Queue ---- // +enum class QueueType { present, graphics, compute, transfer }; + +GFXRECON_BEGIN_NAMESPACE(detail) + +// Sentinel value, used in implementation only +inline const uint32_t QUEUE_INDEX_MAX_VALUE = 65536; + +GFXRECON_END_NAMESPACE(detail) + +// ---- Device ---- // + +struct Device { + VkDevice device = VK_NULL_HANDLE; + PhysicalDevice physical_device; + VkSurfaceKHR surface = VK_NULL_HANDLE; + std::vector queue_families; + VkAllocationCallbacks* allocation_callbacks = VK_NULL_HANDLE; + PFN_vkGetDeviceProcAddr fp_vkGetDeviceProcAddr = nullptr; + uint32_t instance_version = VKB_VK_API_VERSION_1_0; + + Result get_queue_index(QueueType type) const; + // Only a compute or transfer queue type is valid. All other queue types do not support a 'dedicated' queue index + Result get_dedicated_queue_index(QueueType type) const; + + Result get_queue(QueueType type) const; + // Only a compute or transfer queue type is valid. 
All other queue types do not support a 'dedicated' queue + Result get_dedicated_queue(QueueType type) const; + + // Return a loaded dispatch table + DispatchTable make_table() const; + + // A conversion function which allows this Device to be used + // in places where VkDevice would have been used. + operator VkDevice() const; + + private: + struct { + PFN_vkGetDeviceQueue fp_vkGetDeviceQueue = nullptr; + PFN_vkDestroyDevice fp_vkDestroyDevice = nullptr; + } internal_table; + friend class DeviceBuilder; + friend void destroy_device(Device const& device); +}; + + +// For advanced device queue setup +struct CustomQueueDescription { + explicit CustomQueueDescription(uint32_t index, std::vector priorities); + uint32_t index = 0; + std::vector priorities; +}; + +void destroy_device(Device const& device); + +class DeviceBuilder { + public: + // Any features and extensions that are requested/required in PhysicalDeviceSelector are automatically enabled. + explicit DeviceBuilder(PhysicalDevice physical_device); + + Result build() const; + + // For Advanced Users: specify the exact list of VkDeviceQueueCreateInfo's needed for the application. + // If a custom queue setup is provided, getting the queues and queue indexes is up to the application. + DeviceBuilder& custom_queue_setup(std::vector queue_descriptions); + + // Add a structure to the pNext chain of VkDeviceCreateInfo. + // The structure must be valid when DeviceBuilder::build() is called. + template DeviceBuilder& add_pNext(T* structure) { + info.pNext_chain.push_back(reinterpret_cast(structure)); + return *this; + } + + // Provide custom allocation callbacks. + DeviceBuilder& set_allocation_callbacks(VkAllocationCallbacks* callbacks); + + private: + PhysicalDevice physical_device; + struct DeviceInfo { + VkDeviceCreateFlags flags = static_cast(0); + std::vector pNext_chain; + std::vector queue_descriptions; + VkAllocationCallbacks* allocation_callbacks = VK_NULL_HANDLE; + } info; +}; + +// ---- Swapchain ---- // +struct Swapchain { + VkDevice device = VK_NULL_HANDLE; + VkSwapchainKHR swapchain = VK_NULL_HANDLE; + uint32_t image_count = 0; + VkFormat image_format = VK_FORMAT_UNDEFINED; // The image format actually used when creating the swapchain. + VkColorSpaceKHR color_space = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR; // The color space actually used when creating the swapchain. + VkImageUsageFlags image_usage_flags = 0; + VkExtent2D extent = { 0, 0 }; + // The value of minImageCount actually used when creating the swapchain; note that the presentation engine is always free to create more images than that. + uint32_t requested_min_image_count = 0; + VkPresentModeKHR present_mode = VK_PRESENT_MODE_IMMEDIATE_KHR; // The present mode actually used when creating the swapchain. + uint32_t instance_version = VKB_VK_API_VERSION_1_0; + VkAllocationCallbacks* allocation_callbacks = VK_NULL_HANDLE; + + // Returns a vector of VkImage handles to the swapchain. + Result> get_images(); + + // Returns a vector of VkImageView's to the VkImage's of the swapchain. + // VkImageViews must be destroyed. The pNext chain must be a nullptr or a valid + // structure. + Result> get_image_views(); + Result> get_image_views(const void* pNext); + void destroy_image_views(std::vector const& image_views); + + // A conversion function which allows this Swapchain to be used + // in places where VkSwapchainKHR would have been used. 
+ operator VkSwapchainKHR() const; + + private: + struct { + PFN_vkGetSwapchainImagesKHR fp_vkGetSwapchainImagesKHR = nullptr; + PFN_vkCreateImageView fp_vkCreateImageView = nullptr; + PFN_vkDestroyImageView fp_vkDestroyImageView = nullptr; + PFN_vkDestroySwapchainKHR fp_vkDestroySwapchainKHR = nullptr; + } internal_table; + friend class SwapchainBuilder; + friend void destroy_swapchain(Swapchain const& swapchain); +}; + +void destroy_swapchain(Swapchain const& swapchain); + +class SwapchainBuilder { + public: + // Construct a SwapchainBuilder with a `gfxrecon::test::Device` + explicit SwapchainBuilder(Device const& device); + // Construct a SwapchainBuilder with a specific VkSurfaceKHR handle and `gfxrecon::test::Device` + explicit SwapchainBuilder(Device const& device, VkSurfaceKHR const surface); + // Construct a SwapchainBuilder with Vulkan handles for the physical device, device, and surface + // Optionally can provide the uint32_t indices for the graphics and present queue + // Note: The constructor will query the graphics & present queue if the indices are not provided + explicit SwapchainBuilder(VkPhysicalDevice const physical_device, + VkDevice const device, + VkSurfaceKHR const surface, + uint32_t graphics_queue_index = detail::QUEUE_INDEX_MAX_VALUE, + uint32_t present_queue_index = detail::QUEUE_INDEX_MAX_VALUE); + + Result build() const; + + // Set the oldSwapchain member of VkSwapchainCreateInfoKHR. + // For use in rebuilding a swapchain. + SwapchainBuilder& set_old_swapchain(VkSwapchainKHR old_swapchain); + SwapchainBuilder& set_old_swapchain(Swapchain const& swapchain); + + + // Desired size of the swapchain. By default, the swapchain will use the size + // of the window being drawn to. + SwapchainBuilder& set_desired_extent(uint32_t width, uint32_t height); + + // When determining the surface format, make this the first to be used if supported. + SwapchainBuilder& set_desired_format(VkSurfaceFormatKHR format); + // Add this swapchain format to the end of the list of formats selected from. + SwapchainBuilder& add_fallback_format(VkSurfaceFormatKHR format); + // Use the default swapchain formats. This is done if no formats are provided. + // Default surface format is {VK_FORMAT_B8G8R8A8_SRGB, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR} + SwapchainBuilder& use_default_format_selection(); + + // When determining the present mode, make this the first to be used if supported. + SwapchainBuilder& set_desired_present_mode(VkPresentModeKHR present_mode); + // Add this present mode to the end of the list of present modes selected from. + SwapchainBuilder& add_fallback_present_mode(VkPresentModeKHR present_mode); + // Use the default presentation mode. This is done if no present modes are provided. + // Default present modes: VK_PRESENT_MODE_MAILBOX_KHR with fallback VK_PRESENT_MODE_FIFO_KHR + SwapchainBuilder& use_default_present_mode_selection(); + + // Set the bitmask of the image usage for acquired swapchain images. + // If the surface capabilities cannot allow it, building the swapchain will result in the `SwapchainError::required_usage_not_supported` error. + SwapchainBuilder& set_image_usage_flags(VkImageUsageFlags usage_flags); + // Add a image usage to the bitmask for acquired swapchain images. + SwapchainBuilder& add_image_usage_flags(VkImageUsageFlags usage_flags); + // Use the default image usage bitmask values. This is the default if no image usages + // are provided. 
The default is VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT
+    SwapchainBuilder& use_default_image_usage_flags();
+
+    // Set the number of views for a multiview/stereo surface
+    SwapchainBuilder& set_image_array_layer_count(uint32_t array_layer_count);
+
+    // Convenient named constants for passing to set_desired_min_image_count().
+    // Note that it is not an `enum class`, so its constants can be passed as an integer value without casting
+    // In other words, these might as well be `static const int`, but they benefit from being grouped together this way.
+    enum BufferMode {
+        SINGLE_BUFFERING = 1,
+        DOUBLE_BUFFERING = 2,
+        TRIPLE_BUFFERING = 3,
+    };
+
+    // Sets the desired minimum image count for the swapchain.
+    // Note that the presentation engine is always free to create more images than requested.
+    // You may pass one of the values specified in the BufferMode enum, or any integer value.
+    // For instance, if you pass DOUBLE_BUFFERING, the presentation engine is allowed to give you a double buffering setup, triple buffering, or more. This is up to the drivers.
+    SwapchainBuilder& set_desired_min_image_count(uint32_t min_image_count);
+
+    // Sets a required minimum image count for the swapchain.
+    // If the surface capabilities cannot allow it, building the swapchain will result in the `SwapchainError::required_min_image_count_too_low` error.
+    // Otherwise, the same observations from set_desired_min_image_count() apply.
+    // A value of 0 is specially interpreted as meaning "no requirement", and is the behavior by default.
+    SwapchainBuilder& set_required_min_image_count(uint32_t required_min_image_count);
+
+    // Set whether the Vulkan implementation is allowed to discard rendering operations that
+    // affect regions of the surface that are not visible. Default is true.
+    // Note: Applications should use the default of true if they do not expect to read back the content
+    // of presentable images before presenting them or after reacquiring them, and if their fragment
+    // shaders do not have any side effects that require them to run for all pixels in the presentable image.
+    SwapchainBuilder& set_clipped(bool clipped = true);
+
+    // Set the VkSwapchainCreateFlagBitsKHR.
+    SwapchainBuilder& set_create_flags(VkSwapchainCreateFlagBitsKHR create_flags);
+    // Set the transform to be applied, like a 90 degree rotation. Default is no transform.
+    SwapchainBuilder& set_pre_transform_flags(VkSurfaceTransformFlagBitsKHR pre_transform_flags);
+    // Set the alpha channel to be used with other windows on the system. Default is VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR.
+    SwapchainBuilder& set_composite_alpha_flags(VkCompositeAlphaFlagBitsKHR composite_alpha_flags);
+
+    // Add a structure to the pNext chain of VkSwapchainCreateInfoKHR.
+    // The structure must be valid when SwapchainBuilder::build() is called.
+    template <typename T> SwapchainBuilder& add_pNext(T* structure) {
+        info.pNext_chain.push_back(reinterpret_cast<VkBaseOutStructure*>(structure));
+        return *this;
+    }
+
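+    // A typical swapchain setup might look like the following sketch (illustrative only; it assumes a
+    // gfxrecon::test::Device named `device` was built earlier and that `width`/`height` come from the window):
+    //
+    //     gfxrecon::test::SwapchainBuilder swapchain_builder{ device };
+    //     auto swapchain_ret = swapchain_builder.set_desired_extent(width, height)
+    //                              .set_desired_present_mode(VK_PRESENT_MODE_FIFO_KHR)
+    //                              .set_old_swapchain(old_swapchain)
+    //                              .build();
+    //     if (!swapchain_ret) { /* handle the error */ }
+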
+    // Provide custom allocation callbacks.
+    SwapchainBuilder& set_allocation_callbacks(VkAllocationCallbacks* callbacks);
+
+    private:
+    void add_desired_formats(std::vector<VkSurfaceFormatKHR>& formats) const;
+    void add_desired_present_modes(std::vector<VkPresentModeKHR>& modes) const;
+
+    struct SwapchainInfo {
+        VkPhysicalDevice physical_device = VK_NULL_HANDLE;
+        VkDevice device = VK_NULL_HANDLE;
+        std::vector<VkBaseOutStructure*> pNext_chain;
+        VkSwapchainCreateFlagBitsKHR create_flags = static_cast<VkSwapchainCreateFlagBitsKHR>(0);
+        VkSurfaceKHR surface = VK_NULL_HANDLE;
+        std::vector<VkSurfaceFormatKHR> desired_formats;
+        uint32_t instance_version = VKB_VK_API_VERSION_1_0;
+        uint32_t desired_width = 256;
+        uint32_t desired_height = 256;
+        uint32_t array_layer_count = 1;
+        uint32_t min_image_count = 0;
+        uint32_t required_min_image_count = 0;
+        VkImageUsageFlags image_usage_flags = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+        uint32_t graphics_queue_index = 0;
+        uint32_t present_queue_index = 0;
+        VkSurfaceTransformFlagBitsKHR pre_transform = static_cast<VkSurfaceTransformFlagBitsKHR>(0);
+#if defined(__ANDROID__)
+        VkCompositeAlphaFlagBitsKHR composite_alpha = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
+#else
+        VkCompositeAlphaFlagBitsKHR composite_alpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
+#endif
+        std::vector<VkPresentModeKHR> desired_present_modes;
+        bool clipped = true;
+        VkSwapchainKHR old_swapchain = VK_NULL_HANDLE;
+        VkAllocationCallbacks* allocation_callbacks = VK_NULL_HANDLE;
+    } info;
+};
+
+GFXRECON_END_NAMESPACE(test)
+
+GFXRECON_END_NAMESPACE(gfxrecon)
+
+namespace std {
+
+template <> struct is_error_code_enum<gfxrecon::test::InstanceError> : true_type {};
+template <> struct is_error_code_enum<gfxrecon::test::PhysicalDeviceError> : true_type {};
+template <> struct is_error_code_enum<gfxrecon::test::QueueError> : true_type {};
+template <> struct is_error_code_enum<gfxrecon::test::DeviceError> : true_type {};
+template <> struct is_error_code_enum<gfxrecon::test::SwapchainError> : true_type {};
+
+} // namespace std
+
+#endif // GFXRECON_TEST_APP_BASE_H
diff --git a/test/test_apps/common/test_app_dispatch.h b/test/test_apps/common/test_app_dispatch.h
new file mode 100644
index 0000000000..f18fdea99a
--- /dev/null
+++ b/test/test_apps/common/test_app_dispatch.h
@@ -0,0 +1,8098 @@
+/*
+** Copyright (c) 2018-2023 Valve Corporation
+** Copyright (c) 2018-2024 LunarG, Inc.
+**
+** Permission is hereby granted, free of charge, to any person obtaining a
+** copy of this software and associated documentation files (the "Software"),
+** to deal in the Software without restriction, including without limitation
+** the rights to use, copy, modify, merge, publish, distribute, sublicense,
+** and/or sell copies of the Software, and to permit persons to whom the
+** Software is furnished to do so, subject to the following conditions:
+**
+** The above copyright notice and this permission notice shall be included in
+** all copies or substantial portions of the Software.
+**
+** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+** IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+** AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+** FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+** DEALINGS IN THE SOFTWARE.
+*/ + +#ifndef GFXRECON_TEST_APP_DISPATCH_H +#define GFXRECON_TEST_APP_DISPATCH_H + +#include + +#include "util/defines.h" + +GFXRECON_BEGIN_NAMESPACE(gfxrecon) + +GFXRECON_BEGIN_NAMESPACE(test) + +struct InstanceDispatchTable { + InstanceDispatchTable() = default; + InstanceDispatchTable(VkInstance instance, PFN_vkGetInstanceProcAddr procAddr) : instance(instance), populated(true) { + fp_vkDestroyInstance = reinterpret_cast(procAddr(instance, "vkDestroyInstance")); + fp_vkEnumeratePhysicalDevices = reinterpret_cast(procAddr(instance, "vkEnumeratePhysicalDevices")); + fp_vkGetInstanceProcAddr = reinterpret_cast(procAddr(instance, "vkGetInstanceProcAddr")); + fp_vkGetPhysicalDeviceProperties = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceProperties")); + fp_vkGetPhysicalDeviceQueueFamilyProperties = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceQueueFamilyProperties")); + fp_vkGetPhysicalDeviceMemoryProperties = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceMemoryProperties")); + fp_vkGetPhysicalDeviceFeatures = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceFeatures")); + fp_vkGetPhysicalDeviceFormatProperties = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceFormatProperties")); + fp_vkGetPhysicalDeviceImageFormatProperties = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceImageFormatProperties")); + fp_vkEnumerateDeviceLayerProperties = reinterpret_cast(procAddr(instance, "vkEnumerateDeviceLayerProperties")); + fp_vkEnumerateDeviceExtensionProperties = reinterpret_cast(procAddr(instance, "vkEnumerateDeviceExtensionProperties")); + fp_vkGetPhysicalDeviceSparseImageFormatProperties = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSparseImageFormatProperties")); +#if (defined(VK_KHR_android_surface)) + fp_vkCreateAndroidSurfaceKHR = reinterpret_cast(procAddr(instance, "vkCreateAndroidSurfaceKHR")); +#endif +#if (defined(VK_KHR_display)) + fp_vkGetPhysicalDeviceDisplayPropertiesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceDisplayPropertiesKHR")); +#endif +#if (defined(VK_KHR_display)) + fp_vkGetPhysicalDeviceDisplayPlanePropertiesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR")); +#endif +#if (defined(VK_KHR_display)) + fp_vkGetDisplayPlaneSupportedDisplaysKHR = reinterpret_cast(procAddr(instance, "vkGetDisplayPlaneSupportedDisplaysKHR")); +#endif +#if (defined(VK_KHR_display)) + fp_vkGetDisplayModePropertiesKHR = reinterpret_cast(procAddr(instance, "vkGetDisplayModePropertiesKHR")); +#endif +#if (defined(VK_KHR_display)) + fp_vkCreateDisplayModeKHR = reinterpret_cast(procAddr(instance, "vkCreateDisplayModeKHR")); +#endif +#if (defined(VK_KHR_display)) + fp_vkGetDisplayPlaneCapabilitiesKHR = reinterpret_cast(procAddr(instance, "vkGetDisplayPlaneCapabilitiesKHR")); +#endif +#if (defined(VK_KHR_display)) + fp_vkCreateDisplayPlaneSurfaceKHR = reinterpret_cast(procAddr(instance, "vkCreateDisplayPlaneSurfaceKHR")); +#endif +#if (defined(VK_KHR_surface)) + fp_vkDestroySurfaceKHR = reinterpret_cast(procAddr(instance, "vkDestroySurfaceKHR")); +#endif +#if (defined(VK_KHR_surface)) + fp_vkGetPhysicalDeviceSurfaceSupportKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSurfaceSupportKHR")); +#endif +#if (defined(VK_KHR_surface)) + fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR")); +#endif +#if (defined(VK_KHR_surface)) + fp_vkGetPhysicalDeviceSurfaceFormatsKHR = 
reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSurfaceFormatsKHR")); +#endif +#if (defined(VK_KHR_surface)) + fp_vkGetPhysicalDeviceSurfacePresentModesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSurfacePresentModesKHR")); +#endif +#if (defined(VK_NN_vi_surface)) + fp_vkCreateViSurfaceNN = reinterpret_cast(procAddr(instance, "vkCreateViSurfaceNN")); +#endif +#if (defined(VK_KHR_wayland_surface)) + fp_vkCreateWaylandSurfaceKHR = reinterpret_cast(procAddr(instance, "vkCreateWaylandSurfaceKHR")); +#endif +#if (defined(VK_KHR_wayland_surface)) + fp_vkGetPhysicalDeviceWaylandPresentationSupportKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR")); +#endif +#if (defined(VK_KHR_win32_surface)) + fp_vkCreateWin32SurfaceKHR = reinterpret_cast(procAddr(instance, "vkCreateWin32SurfaceKHR")); +#endif +#if (defined(VK_KHR_win32_surface)) + fp_vkGetPhysicalDeviceWin32PresentationSupportKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR")); +#endif +#if (defined(VK_KHR_xlib_surface)) + fp_vkCreateXlibSurfaceKHR = reinterpret_cast(procAddr(instance, "vkCreateXlibSurfaceKHR")); +#endif +#if (defined(VK_KHR_xlib_surface)) + fp_vkGetPhysicalDeviceXlibPresentationSupportKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR")); +#endif +#if (defined(VK_KHR_xcb_surface)) + fp_vkCreateXcbSurfaceKHR = reinterpret_cast(procAddr(instance, "vkCreateXcbSurfaceKHR")); +#endif +#if (defined(VK_KHR_xcb_surface)) + fp_vkGetPhysicalDeviceXcbPresentationSupportKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR")); +#endif +#if (defined(VK_EXT_directfb_surface)) + fp_vkCreateDirectFBSurfaceEXT = reinterpret_cast(procAddr(instance, "vkCreateDirectFBSurfaceEXT")); +#endif +#if (defined(VK_EXT_directfb_surface)) + fp_vkGetPhysicalDeviceDirectFBPresentationSupportEXT = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT")); +#endif +#if (defined(VK_FUCHSIA_imagepipe_surface)) + fp_vkCreateImagePipeSurfaceFUCHSIA = reinterpret_cast(procAddr(instance, "vkCreateImagePipeSurfaceFUCHSIA")); +#endif +#if (defined(VK_GGP_stream_descriptor_surface)) + fp_vkCreateStreamDescriptorSurfaceGGP = reinterpret_cast(procAddr(instance, "vkCreateStreamDescriptorSurfaceGGP")); +#endif +#if (defined(VK_QNX_screen_surface)) + fp_vkCreateScreenSurfaceQNX = reinterpret_cast(procAddr(instance, "vkCreateScreenSurfaceQNX")); +#endif +#if (defined(VK_QNX_screen_surface)) + fp_vkGetPhysicalDeviceScreenPresentationSupportQNX = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX")); +#endif +#if (defined(VK_EXT_debug_report)) + fp_vkCreateDebugReportCallbackEXT = reinterpret_cast(procAddr(instance, "vkCreateDebugReportCallbackEXT")); +#endif +#if (defined(VK_EXT_debug_report)) + fp_vkDestroyDebugReportCallbackEXT = reinterpret_cast(procAddr(instance, "vkDestroyDebugReportCallbackEXT")); +#endif +#if (defined(VK_EXT_debug_report)) + fp_vkDebugReportMessageEXT = reinterpret_cast(procAddr(instance, "vkDebugReportMessageEXT")); +#endif +#if (defined(VK_NV_external_memory_capabilities)) + fp_vkGetPhysicalDeviceExternalImageFormatPropertiesNV = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV")); +#endif +#if (defined(VK_VERSION_1_1)) + fp_vkGetPhysicalDeviceFeatures2 = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceFeatures2")); +#endif +#if (defined(VK_VERSION_1_1)) + 
fp_vkGetPhysicalDeviceProperties2 = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceProperties2")); +#endif +#if (defined(VK_VERSION_1_1)) + fp_vkGetPhysicalDeviceFormatProperties2 = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceFormatProperties2")); +#endif +#if (defined(VK_VERSION_1_1)) + fp_vkGetPhysicalDeviceImageFormatProperties2 = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceImageFormatProperties2")); +#endif +#if (defined(VK_VERSION_1_1)) + fp_vkGetPhysicalDeviceQueueFamilyProperties2 = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceQueueFamilyProperties2")); +#endif +#if (defined(VK_VERSION_1_1)) + fp_vkGetPhysicalDeviceMemoryProperties2 = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceMemoryProperties2")); +#endif +#if (defined(VK_VERSION_1_1)) + fp_vkGetPhysicalDeviceSparseImageFormatProperties2 = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSparseImageFormatProperties2")); +#endif +#if (defined(VK_VERSION_1_1)) + fp_vkGetPhysicalDeviceExternalBufferProperties = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceExternalBufferProperties")); +#endif +#if (defined(VK_NV_external_memory_sci_buf)) + fp_vkGetPhysicalDeviceExternalMemorySciBufPropertiesNV = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceExternalMemorySciBufPropertiesNV")); +#endif +#if (defined(VK_NV_external_memory_sci_buf)) + fp_vkGetPhysicalDeviceSciBufAttributesNV = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSciBufAttributesNV")); +#endif +#if (defined(VK_VERSION_1_1)) + fp_vkGetPhysicalDeviceExternalSemaphoreProperties = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceExternalSemaphoreProperties")); +#endif +#if (defined(VK_VERSION_1_1)) + fp_vkGetPhysicalDeviceExternalFenceProperties = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceExternalFenceProperties")); +#endif +#if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) + fp_vkGetPhysicalDeviceSciSyncAttributesNV = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSciSyncAttributesNV")); +#endif +#if (defined(VK_EXT_direct_mode_display)) + fp_vkReleaseDisplayEXT = reinterpret_cast(procAddr(instance, "vkReleaseDisplayEXT")); +#endif +#if (defined(VK_EXT_acquire_xlib_display)) + fp_vkAcquireXlibDisplayEXT = reinterpret_cast(procAddr(instance, "vkAcquireXlibDisplayEXT")); +#endif +#if (defined(VK_EXT_acquire_xlib_display)) + fp_vkGetRandROutputDisplayEXT = reinterpret_cast(procAddr(instance, "vkGetRandROutputDisplayEXT")); +#endif +#if (defined(VK_NV_acquire_winrt_display)) + fp_vkAcquireWinrtDisplayNV = reinterpret_cast(procAddr(instance, "vkAcquireWinrtDisplayNV")); +#endif +#if (defined(VK_NV_acquire_winrt_display)) + fp_vkGetWinrtDisplayNV = reinterpret_cast(procAddr(instance, "vkGetWinrtDisplayNV")); +#endif +#if (defined(VK_EXT_display_surface_counter)) + fp_vkGetPhysicalDeviceSurfaceCapabilities2EXT = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT")); +#endif +#if (defined(VK_VERSION_1_1)) + fp_vkEnumeratePhysicalDeviceGroups = reinterpret_cast(procAddr(instance, "vkEnumeratePhysicalDeviceGroups")); +#endif +#if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group)) + fp_vkGetPhysicalDevicePresentRectanglesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDevicePresentRectanglesKHR")); +#endif +#if (defined(VK_MVK_ios_surface)) + fp_vkCreateIOSSurfaceMVK = reinterpret_cast(procAddr(instance, "vkCreateIOSSurfaceMVK")); +#endif +#if (defined(VK_MVK_macos_surface)) + 
fp_vkCreateMacOSSurfaceMVK = reinterpret_cast(procAddr(instance, "vkCreateMacOSSurfaceMVK")); +#endif +#if (defined(VK_EXT_metal_surface)) + fp_vkCreateMetalSurfaceEXT = reinterpret_cast(procAddr(instance, "vkCreateMetalSurfaceEXT")); +#endif +#if (defined(VK_EXT_sample_locations)) + fp_vkGetPhysicalDeviceMultisamplePropertiesEXT = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT")); +#endif +#if (defined(VK_KHR_get_surface_capabilities2)) + fp_vkGetPhysicalDeviceSurfaceCapabilities2KHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR")); +#endif +#if (defined(VK_KHR_get_surface_capabilities2)) + fp_vkGetPhysicalDeviceSurfaceFormats2KHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSurfaceFormats2KHR")); +#endif +#if (defined(VK_KHR_get_display_properties2)) + fp_vkGetPhysicalDeviceDisplayProperties2KHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceDisplayProperties2KHR")); +#endif +#if (defined(VK_KHR_get_display_properties2)) + fp_vkGetPhysicalDeviceDisplayPlaneProperties2KHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR")); +#endif +#if (defined(VK_KHR_get_display_properties2)) + fp_vkGetDisplayModeProperties2KHR = reinterpret_cast(procAddr(instance, "vkGetDisplayModeProperties2KHR")); +#endif +#if (defined(VK_KHR_get_display_properties2)) + fp_vkGetDisplayPlaneCapabilities2KHR = reinterpret_cast(procAddr(instance, "vkGetDisplayPlaneCapabilities2KHR")); +#endif +#if (defined(VK_KHR_calibrated_timestamps)) + fp_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsKHR")); +#endif +#if (defined(VK_EXT_debug_utils)) + fp_vkCreateDebugUtilsMessengerEXT = reinterpret_cast(procAddr(instance, "vkCreateDebugUtilsMessengerEXT")); +#endif +#if (defined(VK_EXT_debug_utils)) + fp_vkDestroyDebugUtilsMessengerEXT = reinterpret_cast(procAddr(instance, "vkDestroyDebugUtilsMessengerEXT")); +#endif +#if (defined(VK_EXT_debug_utils)) + fp_vkSubmitDebugUtilsMessageEXT = reinterpret_cast(procAddr(instance, "vkSubmitDebugUtilsMessageEXT")); +#endif +#if (defined(VK_NV_cooperative_matrix)) + fp_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV")); +#endif +#if (defined(VK_EXT_full_screen_exclusive)) + fp_vkGetPhysicalDeviceSurfacePresentModes2EXT = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT")); +#endif +#if (defined(VK_KHR_performance_query)) + fp_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = reinterpret_cast(procAddr(instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR")); +#endif +#if (defined(VK_KHR_performance_query)) + fp_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR")); +#endif +#if (defined(VK_EXT_headless_surface)) + fp_vkCreateHeadlessSurfaceEXT = reinterpret_cast(procAddr(instance, "vkCreateHeadlessSurfaceEXT")); +#endif +#if (defined(VK_NV_coverage_reduction_mode)) + fp_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkGetPhysicalDeviceToolProperties = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceToolProperties")); +#endif +#if 
(defined(VK_KHR_object_refresh)) + fp_vkGetPhysicalDeviceRefreshableObjectTypesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceRefreshableObjectTypesKHR")); +#endif +#if (defined(VK_KHR_fragment_shading_rate)) + fp_vkGetPhysicalDeviceFragmentShadingRatesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR")); +#endif +#if (defined(VK_KHR_video_queue)) + fp_vkGetPhysicalDeviceVideoCapabilitiesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR")); +#endif +#if (defined(VK_KHR_video_queue)) + fp_vkGetPhysicalDeviceVideoFormatPropertiesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR")); +#endif +#if (defined(VK_KHR_video_encode_queue)) + fp_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR")); +#endif +#if (defined(VK_EXT_acquire_drm_display)) + fp_vkAcquireDrmDisplayEXT = reinterpret_cast(procAddr(instance, "vkAcquireDrmDisplayEXT")); +#endif +#if (defined(VK_EXT_acquire_drm_display)) + fp_vkGetDrmDisplayEXT = reinterpret_cast(procAddr(instance, "vkGetDrmDisplayEXT")); +#endif +#if (defined(VK_NV_optical_flow)) + fp_vkGetPhysicalDeviceOpticalFlowImageFormatsNV = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV")); +#endif +#if (defined(VK_KHR_cooperative_matrix)) + fp_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR")); +#endif +#if (defined(VK_KHR_get_physical_device_properties2)) + fp_vkGetPhysicalDeviceFeatures2KHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceFeatures2KHR")); +#endif +#if (defined(VK_KHR_get_physical_device_properties2)) + fp_vkGetPhysicalDeviceProperties2KHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceProperties2KHR")); +#endif +#if (defined(VK_KHR_get_physical_device_properties2)) + fp_vkGetPhysicalDeviceFormatProperties2KHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceFormatProperties2KHR")); +#endif +#if (defined(VK_KHR_get_physical_device_properties2)) + fp_vkGetPhysicalDeviceImageFormatProperties2KHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceImageFormatProperties2KHR")); +#endif +#if (defined(VK_KHR_get_physical_device_properties2)) + fp_vkGetPhysicalDeviceQueueFamilyProperties2KHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR")); +#endif +#if (defined(VK_KHR_get_physical_device_properties2)) + fp_vkGetPhysicalDeviceMemoryProperties2KHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceMemoryProperties2KHR")); +#endif +#if (defined(VK_KHR_get_physical_device_properties2)) + fp_vkGetPhysicalDeviceSparseImageFormatProperties2KHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR")); +#endif +#if (defined(VK_KHR_external_memory_capabilities)) + fp_vkGetPhysicalDeviceExternalBufferPropertiesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR")); +#endif +#if (defined(VK_KHR_external_semaphore_capabilities)) + fp_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR")); +#endif +#if (defined(VK_KHR_external_fence_capabilities)) + fp_vkGetPhysicalDeviceExternalFencePropertiesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR")); +#endif +#if 
(defined(VK_KHR_device_group_creation)) + fp_vkEnumeratePhysicalDeviceGroupsKHR = reinterpret_cast(procAddr(instance, "vkEnumeratePhysicalDeviceGroupsKHR")); +#endif +#if (defined(VK_EXT_calibrated_timestamps)) + fp_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT")); +#endif +#if (defined(VK_EXT_tooling_info)) + fp_vkGetPhysicalDeviceToolPropertiesEXT = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceToolPropertiesEXT")); +#endif + } + void destroyInstance(const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyInstance(instance, pAllocator); + } + VkResult enumeratePhysicalDevices(uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices) const noexcept { + return fp_vkEnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices); + } + PFN_vkVoidFunction getInstanceProcAddr(const char* pName) const noexcept { + return fp_vkGetInstanceProcAddr(instance, pName); + } + void getPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties) const noexcept { + fp_vkGetPhysicalDeviceProperties(physicalDevice, pProperties); + } + void getPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties) const noexcept { + fp_vkGetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); + } + void getPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties) const noexcept { + fp_vkGetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties); + } + void getPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) const noexcept { + fp_vkGetPhysicalDeviceFeatures(physicalDevice, pFeatures); + } + void getPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) const noexcept { + fp_vkGetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties); + } + VkResult getPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) const noexcept { + return fp_vkGetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties); + } + VkResult enumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties) const noexcept { + return fp_vkEnumerateDeviceLayerProperties(physicalDevice, pPropertyCount, pProperties); + } + VkResult enumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties) const noexcept { + return fp_vkEnumerateDeviceExtensionProperties(physicalDevice, pLayerName, pPropertyCount, pProperties); + } + void getPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties) const noexcept { + fp_vkGetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties); + } +#if (defined(VK_KHR_android_surface)) + VkResult 
createAndroidSurfaceKHR(const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + return fp_vkCreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); + } +#endif +#if (defined(VK_KHR_display)) + VkResult getPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties) const noexcept { + return fp_vkGetPhysicalDeviceDisplayPropertiesKHR(physicalDevice, pPropertyCount, pProperties); + } +#endif +#if (defined(VK_KHR_display)) + VkResult getPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties) const noexcept { + return fp_vkGetPhysicalDeviceDisplayPlanePropertiesKHR(physicalDevice, pPropertyCount, pProperties); + } +#endif +#if (defined(VK_KHR_display)) + VkResult getDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays) const noexcept { + return fp_vkGetDisplayPlaneSupportedDisplaysKHR(physicalDevice, planeIndex, pDisplayCount, pDisplays); + } +#endif +#if (defined(VK_KHR_display)) + VkResult getDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties) const noexcept { + return fp_vkGetDisplayModePropertiesKHR(physicalDevice, display, pPropertyCount, pProperties); + } +#endif +#if (defined(VK_KHR_display)) + VkResult createDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode) const noexcept { + return fp_vkCreateDisplayModeKHR(physicalDevice, display, pCreateInfo, pAllocator, pMode); + } +#endif +#if (defined(VK_KHR_display)) + VkResult getDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities) const noexcept { + return fp_vkGetDisplayPlaneCapabilitiesKHR(physicalDevice, mode, planeIndex, pCapabilities); + } +#endif +#if (defined(VK_KHR_display)) + VkResult createDisplayPlaneSurfaceKHR(const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + return fp_vkCreateDisplayPlaneSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); + } +#endif +#if (defined(VK_KHR_surface)) + void destroySurfaceKHR(VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroySurfaceKHR(instance, surface, pAllocator); + } +#endif +#if (defined(VK_KHR_surface)) + VkResult getPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported) const noexcept { + return fp_vkGetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported); + } +#endif +#if (defined(VK_KHR_surface)) + VkResult getPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities) const noexcept { + return fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities); + } +#endif +#if (defined(VK_KHR_surface)) + VkResult getPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats) const noexcept { + return 
fp_vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats); + } +#endif +#if (defined(VK_KHR_surface)) + VkResult getPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes) const noexcept { + return fp_vkGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes); + } +#endif +#if (defined(VK_NN_vi_surface)) + VkResult createViSurfaceNN(const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + return fp_vkCreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface); + } +#endif +#if (defined(VK_KHR_wayland_surface)) + VkResult createWaylandSurfaceKHR(const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + return fp_vkCreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); + } +#endif +#if (defined(VK_KHR_wayland_surface)) + VkBool32 getPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display) const noexcept { + return fp_vkGetPhysicalDeviceWaylandPresentationSupportKHR(physicalDevice, queueFamilyIndex, display); + } +#endif +#if (defined(VK_KHR_win32_surface)) + VkResult createWin32SurfaceKHR(const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + return fp_vkCreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); + } +#endif +#if (defined(VK_KHR_win32_surface)) + VkBool32 getPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex) const noexcept { + return fp_vkGetPhysicalDeviceWin32PresentationSupportKHR(physicalDevice, queueFamilyIndex); + } +#endif +#if (defined(VK_KHR_xlib_surface)) + VkResult createXlibSurfaceKHR(const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + return fp_vkCreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); + } +#endif +#if (defined(VK_KHR_xlib_surface)) + VkBool32 getPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID) const noexcept { + return fp_vkGetPhysicalDeviceXlibPresentationSupportKHR(physicalDevice, queueFamilyIndex, dpy, visualID); + } +#endif +#if (defined(VK_KHR_xcb_surface)) + VkResult createXcbSurfaceKHR(const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + return fp_vkCreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); + } +#endif +#if (defined(VK_KHR_xcb_surface)) + VkBool32 getPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id) const noexcept { + return fp_vkGetPhysicalDeviceXcbPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection, visual_id); + } +#endif +#if (defined(VK_EXT_directfb_surface)) + VkResult createDirectFBSurfaceEXT(const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + return fp_vkCreateDirectFBSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface); + } +#endif +#if (defined(VK_EXT_directfb_surface)) + VkBool32 
getPhysicalDeviceDirectFBPresentationSupportEXT(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB* dfb) const noexcept { + return fp_vkGetPhysicalDeviceDirectFBPresentationSupportEXT(physicalDevice, queueFamilyIndex, dfb); + } +#endif +#if (defined(VK_FUCHSIA_imagepipe_surface)) + VkResult createImagePipeSurfaceFUCHSIA(const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + return fp_vkCreateImagePipeSurfaceFUCHSIA(instance, pCreateInfo, pAllocator, pSurface); + } +#endif +#if (defined(VK_GGP_stream_descriptor_surface)) + VkResult createStreamDescriptorSurfaceGGP(const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + return fp_vkCreateStreamDescriptorSurfaceGGP(instance, pCreateInfo, pAllocator, pSurface); + } +#endif +#if (defined(VK_QNX_screen_surface)) + VkResult createScreenSurfaceQNX(const VkScreenSurfaceCreateInfoQNX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + return fp_vkCreateScreenSurfaceQNX(instance, pCreateInfo, pAllocator, pSurface); + } +#endif +#if (defined(VK_QNX_screen_surface)) + VkBool32 getPhysicalDeviceScreenPresentationSupportQNX(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct _screen_window* window) const noexcept { + return fp_vkGetPhysicalDeviceScreenPresentationSupportQNX(physicalDevice, queueFamilyIndex, window); + } +#endif +#if (defined(VK_EXT_debug_report)) + VkResult createDebugReportCallbackEXT(const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback) const noexcept { + return fp_vkCreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback); + } +#endif +#if (defined(VK_EXT_debug_report)) + void destroyDebugReportCallbackEXT(VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyDebugReportCallbackEXT(instance, callback, pAllocator); + } +#endif +#if (defined(VK_EXT_debug_report)) + void debugReportMessageEXT(VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage) const noexcept { + fp_vkDebugReportMessageEXT(instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage); + } +#endif +#if (defined(VK_NV_external_memory_capabilities)) + VkResult getPhysicalDeviceExternalImageFormatPropertiesNV(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties) const noexcept { + return fp_vkGetPhysicalDeviceExternalImageFormatPropertiesNV(physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties); + } +#endif +#if (defined(VK_VERSION_1_1)) + void getPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2KHR* pFeatures) const noexcept { + fp_vkGetPhysicalDeviceFeatures2(physicalDevice, pFeatures); + } +#endif +#if (defined(VK_VERSION_1_1)) + void getPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2KHR* pProperties) const noexcept { + fp_vkGetPhysicalDeviceProperties2(physicalDevice, pProperties); + } +#endif +#if 
(defined(VK_VERSION_1_1)) + void getPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2KHR* pFormatProperties) const noexcept { + fp_vkGetPhysicalDeviceFormatProperties2(physicalDevice, format, pFormatProperties); + } +#endif +#if (defined(VK_VERSION_1_1)) + VkResult getPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2KHR* pImageFormatInfo, VkImageFormatProperties2KHR* pImageFormatProperties) const noexcept { + return fp_vkGetPhysicalDeviceImageFormatProperties2(physicalDevice, pImageFormatInfo, pImageFormatProperties); + } +#endif +#if (defined(VK_VERSION_1_1)) + void getPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR* pQueueFamilyProperties) const noexcept { + fp_vkGetPhysicalDeviceQueueFamilyProperties2(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); + } +#endif +#if (defined(VK_VERSION_1_1)) + void getPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2KHR* pMemoryProperties) const noexcept { + fp_vkGetPhysicalDeviceMemoryProperties2(physicalDevice, pMemoryProperties); + } +#endif +#if (defined(VK_VERSION_1_1)) + void getPhysicalDeviceSparseImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2KHR* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2KHR* pProperties) const noexcept { + fp_vkGetPhysicalDeviceSparseImageFormatProperties2(physicalDevice, pFormatInfo, pPropertyCount, pProperties); + } +#endif +#if (defined(VK_VERSION_1_1)) + void getPhysicalDeviceExternalBufferProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfoKHR* pExternalBufferInfo, VkExternalBufferPropertiesKHR* pExternalBufferProperties) const noexcept { + fp_vkGetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties); + } +#endif +#if (defined(VK_NV_external_memory_sci_buf)) + VkResult getPhysicalDeviceExternalMemorySciBufPropertiesNV(VkPhysicalDevice physicalDevice, VkExternalMemoryHandleTypeFlagBitsKHR handleType, NvSciBufObj handle, VkMemorySciBufPropertiesNV* pMemorySciBufProperties) const noexcept { + return fp_vkGetPhysicalDeviceExternalMemorySciBufPropertiesNV(physicalDevice, handleType, handle, pMemorySciBufProperties); + } +#endif +#if (defined(VK_NV_external_memory_sci_buf)) + VkResult getPhysicalDeviceSciBufAttributesNV(VkPhysicalDevice physicalDevice, NvSciBufAttrList pAttributes) const noexcept { + return fp_vkGetPhysicalDeviceSciBufAttributesNV(physicalDevice, pAttributes); + } +#endif +#if (defined(VK_VERSION_1_1)) + void getPhysicalDeviceExternalSemaphoreProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfoKHR* pExternalSemaphoreInfo, VkExternalSemaphorePropertiesKHR* pExternalSemaphoreProperties) const noexcept { + fp_vkGetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties); + } +#endif +#if (defined(VK_VERSION_1_1)) + void getPhysicalDeviceExternalFenceProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfoKHR* pExternalFenceInfo, VkExternalFencePropertiesKHR* pExternalFenceProperties) const noexcept { + fp_vkGetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties); + } +#endif +#if (defined(VK_NV_external_sci_sync)) || 
(defined(VK_NV_external_sci_sync2)) + VkResult getPhysicalDeviceSciSyncAttributesNV(VkPhysicalDevice physicalDevice, const VkSciSyncAttributesInfoNV* pSciSyncAttributesInfo, NvSciSyncAttrList pAttributes) const noexcept { + return fp_vkGetPhysicalDeviceSciSyncAttributesNV(physicalDevice, pSciSyncAttributesInfo, pAttributes); + } +#endif +#if (defined(VK_EXT_direct_mode_display)) + VkResult releaseDisplayEXT(VkPhysicalDevice physicalDevice, VkDisplayKHR display) const noexcept { + return fp_vkReleaseDisplayEXT(physicalDevice, display); + } +#endif +#if (defined(VK_EXT_acquire_xlib_display)) + VkResult acquireXlibDisplayEXT(VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display) const noexcept { + return fp_vkAcquireXlibDisplayEXT(physicalDevice, dpy, display); + } +#endif +#if (defined(VK_EXT_acquire_xlib_display)) + VkResult getRandROutputDisplayEXT(VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay) const noexcept { + return fp_vkGetRandROutputDisplayEXT(physicalDevice, dpy, rrOutput, pDisplay); + } +#endif +#if (defined(VK_NV_acquire_winrt_display)) + VkResult acquireWinrtDisplayNV(VkPhysicalDevice physicalDevice, VkDisplayKHR display) const noexcept { + return fp_vkAcquireWinrtDisplayNV(physicalDevice, display); + } +#endif +#if (defined(VK_NV_acquire_winrt_display)) + VkResult getWinrtDisplayNV(VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR* pDisplay) const noexcept { + return fp_vkGetWinrtDisplayNV(physicalDevice, deviceRelativeId, pDisplay); + } +#endif +#if (defined(VK_EXT_display_surface_counter)) + VkResult getPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities) const noexcept { + return fp_vkGetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities); + } +#endif +#if (defined(VK_VERSION_1_1)) + VkResult enumeratePhysicalDeviceGroups(uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR* pPhysicalDeviceGroupProperties) const noexcept { + return fp_vkEnumeratePhysicalDeviceGroups(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties); + } +#endif +#if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group)) + VkResult getPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects) const noexcept { + return fp_vkGetPhysicalDevicePresentRectanglesKHR(physicalDevice, surface, pRectCount, pRects); + } +#endif +#if (defined(VK_MVK_ios_surface)) + VkResult createIOSSurfaceMVK(const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + return fp_vkCreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface); + } +#endif +#if (defined(VK_MVK_macos_surface)) + VkResult createMacOSSurfaceMVK(const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + return fp_vkCreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface); + } +#endif +#if (defined(VK_EXT_metal_surface)) + VkResult createMetalSurfaceEXT(const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + return fp_vkCreateMetalSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface); + } +#endif +#if (defined(VK_EXT_sample_locations)) + void getPhysicalDeviceMultisamplePropertiesEXT(VkPhysicalDevice physicalDevice, 
VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties) const noexcept { + fp_vkGetPhysicalDeviceMultisamplePropertiesEXT(physicalDevice, samples, pMultisampleProperties); + } +#endif +#if (defined(VK_KHR_get_surface_capabilities2)) + VkResult getPhysicalDeviceSurfaceCapabilities2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities) const noexcept { + return fp_vkGetPhysicalDeviceSurfaceCapabilities2KHR(physicalDevice, pSurfaceInfo, pSurfaceCapabilities); + } +#endif +#if (defined(VK_KHR_get_surface_capabilities2)) + VkResult getPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats) const noexcept { + return fp_vkGetPhysicalDeviceSurfaceFormats2KHR(physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats); + } +#endif +#if (defined(VK_KHR_get_display_properties2)) + VkResult getPhysicalDeviceDisplayProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties) const noexcept { + return fp_vkGetPhysicalDeviceDisplayProperties2KHR(physicalDevice, pPropertyCount, pProperties); + } +#endif +#if (defined(VK_KHR_get_display_properties2)) + VkResult getPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties) const noexcept { + return fp_vkGetPhysicalDeviceDisplayPlaneProperties2KHR(physicalDevice, pPropertyCount, pProperties); + } +#endif +#if (defined(VK_KHR_get_display_properties2)) + VkResult getDisplayModeProperties2KHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties) const noexcept { + return fp_vkGetDisplayModeProperties2KHR(physicalDevice, display, pPropertyCount, pProperties); + } +#endif +#if (defined(VK_KHR_get_display_properties2)) + VkResult getDisplayPlaneCapabilities2KHR(VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities) const noexcept { + return fp_vkGetDisplayPlaneCapabilities2KHR(physicalDevice, pDisplayPlaneInfo, pCapabilities); + } +#endif +#if (defined(VK_KHR_calibrated_timestamps)) + VkResult getPhysicalDeviceCalibrateableTimeDomainsKHR(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains) const noexcept { + return fp_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR(physicalDevice, pTimeDomainCount, pTimeDomains); + } +#endif +#if (defined(VK_EXT_debug_utils)) + VkResult createDebugUtilsMessengerEXT(const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger) const noexcept { + return fp_vkCreateDebugUtilsMessengerEXT(instance, pCreateInfo, pAllocator, pMessenger); + } +#endif +#if (defined(VK_EXT_debug_utils)) + void destroyDebugUtilsMessengerEXT(VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyDebugUtilsMessengerEXT(instance, messenger, pAllocator); + } +#endif +#if (defined(VK_EXT_debug_utils)) + void submitDebugUtilsMessageEXT(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData) const noexcept { + fp_vkSubmitDebugUtilsMessageEXT(instance, messageSeverity, messageTypes, 
pCallbackData); + } +#endif +#if (defined(VK_NV_cooperative_matrix)) + VkResult getPhysicalDeviceCooperativeMatrixPropertiesNV(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties) const noexcept { + return fp_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(physicalDevice, pPropertyCount, pProperties); + } +#endif +#if (defined(VK_EXT_full_screen_exclusive)) + VkResult getPhysicalDeviceSurfacePresentModes2EXT(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes) const noexcept { + return fp_vkGetPhysicalDeviceSurfacePresentModes2EXT(physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes); + } +#endif +#if (defined(VK_KHR_performance_query)) + VkResult enumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions) const noexcept { + return fp_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions); + } +#endif +#if (defined(VK_KHR_performance_query)) + void getPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses) const noexcept { + fp_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physicalDevice, pPerformanceQueryCreateInfo, pNumPasses); + } +#endif +#if (defined(VK_EXT_headless_surface)) + VkResult createHeadlessSurfaceEXT(const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + return fp_vkCreateHeadlessSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface); + } +#endif +#if (defined(VK_NV_coverage_reduction_mode)) + VkResult getPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations) const noexcept { + return fp_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(physicalDevice, pCombinationCount, pCombinations); + } +#endif +#if (defined(VK_VERSION_1_3)) + VkResult getPhysicalDeviceToolProperties(VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolPropertiesEXT* pToolProperties) const noexcept { + return fp_vkGetPhysicalDeviceToolProperties(physicalDevice, pToolCount, pToolProperties); + } +#endif +#if (defined(VK_KHR_object_refresh)) + VkResult getPhysicalDeviceRefreshableObjectTypesKHR(VkPhysicalDevice physicalDevice, uint32_t* pRefreshableObjectTypeCount, VkObjectType* pRefreshableObjectTypes) const noexcept { + return fp_vkGetPhysicalDeviceRefreshableObjectTypesKHR(physicalDevice, pRefreshableObjectTypeCount, pRefreshableObjectTypes); + } +#endif +#if (defined(VK_KHR_fragment_shading_rate)) + VkResult getPhysicalDeviceFragmentShadingRatesKHR(VkPhysicalDevice physicalDevice, uint32_t* pFragmentShadingRateCount, VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates) const noexcept { + return fp_vkGetPhysicalDeviceFragmentShadingRatesKHR(physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates); + } +#endif +#if (defined(VK_KHR_video_queue)) + VkResult getPhysicalDeviceVideoCapabilitiesKHR(VkPhysicalDevice physicalDevice, const VkVideoProfileInfoKHR* pVideoProfile, VkVideoCapabilitiesKHR* 
pCapabilities) const noexcept { + return fp_vkGetPhysicalDeviceVideoCapabilitiesKHR(physicalDevice, pVideoProfile, pCapabilities); + } +#endif +#if (defined(VK_KHR_video_queue)) + VkResult getPhysicalDeviceVideoFormatPropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo, uint32_t* pVideoFormatPropertyCount, VkVideoFormatPropertiesKHR* pVideoFormatProperties) const noexcept { + return fp_vkGetPhysicalDeviceVideoFormatPropertiesKHR(physicalDevice, pVideoFormatInfo, pVideoFormatPropertyCount, pVideoFormatProperties); + } +#endif +#if (defined(VK_KHR_video_encode_queue)) + VkResult getPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR* pQualityLevelInfo, VkVideoEncodeQualityLevelPropertiesKHR* pQualityLevelProperties) const noexcept { + return fp_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR(physicalDevice, pQualityLevelInfo, pQualityLevelProperties); + } +#endif +#if (defined(VK_EXT_acquire_drm_display)) + VkResult acquireDrmDisplayEXT(VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display) const noexcept { + return fp_vkAcquireDrmDisplayEXT(physicalDevice, drmFd, display); + } +#endif +#if (defined(VK_EXT_acquire_drm_display)) + VkResult getDrmDisplayEXT(VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR* display) const noexcept { + return fp_vkGetDrmDisplayEXT(physicalDevice, drmFd, connectorId, display); + } +#endif +#if (defined(VK_NV_optical_flow)) + VkResult getPhysicalDeviceOpticalFlowImageFormatsNV(VkPhysicalDevice physicalDevice, const VkOpticalFlowImageFormatInfoNV* pOpticalFlowImageFormatInfo, uint32_t* pFormatCount, VkOpticalFlowImageFormatPropertiesNV* pImageFormatProperties) const noexcept { + return fp_vkGetPhysicalDeviceOpticalFlowImageFormatsNV(physicalDevice, pOpticalFlowImageFormatInfo, pFormatCount, pImageFormatProperties); + } +#endif +#if (defined(VK_KHR_cooperative_matrix)) + VkResult getPhysicalDeviceCooperativeMatrixPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesKHR* pProperties) const noexcept { + return fp_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR(physicalDevice, pPropertyCount, pProperties); + } +#endif +#if (defined(VK_KHR_get_physical_device_properties2)) + void getPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2KHR* pFeatures) const noexcept { + fp_vkGetPhysicalDeviceFeatures2KHR(physicalDevice, pFeatures); + } +#endif +#if (defined(VK_KHR_get_physical_device_properties2)) + void getPhysicalDeviceProperties2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2KHR* pProperties) const noexcept { + fp_vkGetPhysicalDeviceProperties2KHR(physicalDevice, pProperties); + } +#endif +#if (defined(VK_KHR_get_physical_device_properties2)) + void getPhysicalDeviceFormatProperties2KHR(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2KHR* pFormatProperties) const noexcept { + fp_vkGetPhysicalDeviceFormatProperties2KHR(physicalDevice, format, pFormatProperties); + } +#endif +#if (defined(VK_KHR_get_physical_device_properties2)) + VkResult getPhysicalDeviceImageFormatProperties2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2KHR* pImageFormatInfo, VkImageFormatProperties2KHR* pImageFormatProperties) const noexcept { + return fp_vkGetPhysicalDeviceImageFormatProperties2KHR(physicalDevice, pImageFormatInfo, pImageFormatProperties); + } 
+#endif +#if (defined(VK_KHR_get_physical_device_properties2)) + void getPhysicalDeviceQueueFamilyProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR* pQueueFamilyProperties) const noexcept { + fp_vkGetPhysicalDeviceQueueFamilyProperties2KHR(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); + } +#endif +#if (defined(VK_KHR_get_physical_device_properties2)) + void getPhysicalDeviceMemoryProperties2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2KHR* pMemoryProperties) const noexcept { + fp_vkGetPhysicalDeviceMemoryProperties2KHR(physicalDevice, pMemoryProperties); + } +#endif +#if (defined(VK_KHR_get_physical_device_properties2)) + void getPhysicalDeviceSparseImageFormatProperties2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2KHR* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2KHR* pProperties) const noexcept { + fp_vkGetPhysicalDeviceSparseImageFormatProperties2KHR(physicalDevice, pFormatInfo, pPropertyCount, pProperties); + } +#endif +#if (defined(VK_KHR_external_memory_capabilities)) + void getPhysicalDeviceExternalBufferPropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfoKHR* pExternalBufferInfo, VkExternalBufferPropertiesKHR* pExternalBufferProperties) const noexcept { + fp_vkGetPhysicalDeviceExternalBufferPropertiesKHR(physicalDevice, pExternalBufferInfo, pExternalBufferProperties); + } +#endif +#if (defined(VK_KHR_external_semaphore_capabilities)) + void getPhysicalDeviceExternalSemaphorePropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfoKHR* pExternalSemaphoreInfo, VkExternalSemaphorePropertiesKHR* pExternalSemaphoreProperties) const noexcept { + fp_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties); + } +#endif +#if (defined(VK_KHR_external_fence_capabilities)) + void getPhysicalDeviceExternalFencePropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfoKHR* pExternalFenceInfo, VkExternalFencePropertiesKHR* pExternalFenceProperties) const noexcept { + fp_vkGetPhysicalDeviceExternalFencePropertiesKHR(physicalDevice, pExternalFenceInfo, pExternalFenceProperties); + } +#endif +#if (defined(VK_KHR_device_group_creation)) + VkResult enumeratePhysicalDeviceGroupsKHR(uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR* pPhysicalDeviceGroupProperties) const noexcept { + return fp_vkEnumeratePhysicalDeviceGroupsKHR(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties); + } +#endif +#if (defined(VK_EXT_calibrated_timestamps)) + VkResult getPhysicalDeviceCalibrateableTimeDomainsEXT(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains) const noexcept { + return fp_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(physicalDevice, pTimeDomainCount, pTimeDomains); + } +#endif +#if (defined(VK_EXT_tooling_info)) + VkResult getPhysicalDeviceToolPropertiesEXT(VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolPropertiesEXT* pToolProperties) const noexcept { + return fp_vkGetPhysicalDeviceToolPropertiesEXT(physicalDevice, pToolCount, pToolProperties); + } +#endif + PFN_vkDestroyInstance fp_vkDestroyInstance = nullptr; + PFN_vkEnumeratePhysicalDevices fp_vkEnumeratePhysicalDevices = nullptr; + PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr = nullptr; + 
PFN_vkGetPhysicalDeviceProperties fp_vkGetPhysicalDeviceProperties = nullptr; + PFN_vkGetPhysicalDeviceQueueFamilyProperties fp_vkGetPhysicalDeviceQueueFamilyProperties = nullptr; + PFN_vkGetPhysicalDeviceMemoryProperties fp_vkGetPhysicalDeviceMemoryProperties = nullptr; + PFN_vkGetPhysicalDeviceFeatures fp_vkGetPhysicalDeviceFeatures = nullptr; + PFN_vkGetPhysicalDeviceFormatProperties fp_vkGetPhysicalDeviceFormatProperties = nullptr; + PFN_vkGetPhysicalDeviceImageFormatProperties fp_vkGetPhysicalDeviceImageFormatProperties = nullptr; + PFN_vkEnumerateDeviceLayerProperties fp_vkEnumerateDeviceLayerProperties = nullptr; + PFN_vkEnumerateDeviceExtensionProperties fp_vkEnumerateDeviceExtensionProperties = nullptr; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties fp_vkGetPhysicalDeviceSparseImageFormatProperties = nullptr; +#if (defined(VK_KHR_android_surface)) + PFN_vkCreateAndroidSurfaceKHR fp_vkCreateAndroidSurfaceKHR = nullptr; +#else + void * fp_vkCreateAndroidSurfaceKHR{}; +#endif +#if (defined(VK_KHR_display)) + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR fp_vkGetPhysicalDeviceDisplayPropertiesKHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceDisplayPropertiesKHR{}; +#endif +#if (defined(VK_KHR_display)) + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR fp_vkGetPhysicalDeviceDisplayPlanePropertiesKHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceDisplayPlanePropertiesKHR{}; +#endif +#if (defined(VK_KHR_display)) + PFN_vkGetDisplayPlaneSupportedDisplaysKHR fp_vkGetDisplayPlaneSupportedDisplaysKHR = nullptr; +#else + void * fp_vkGetDisplayPlaneSupportedDisplaysKHR{}; +#endif +#if (defined(VK_KHR_display)) + PFN_vkGetDisplayModePropertiesKHR fp_vkGetDisplayModePropertiesKHR = nullptr; +#else + void * fp_vkGetDisplayModePropertiesKHR{}; +#endif +#if (defined(VK_KHR_display)) + PFN_vkCreateDisplayModeKHR fp_vkCreateDisplayModeKHR = nullptr; +#else + void * fp_vkCreateDisplayModeKHR{}; +#endif +#if (defined(VK_KHR_display)) + PFN_vkGetDisplayPlaneCapabilitiesKHR fp_vkGetDisplayPlaneCapabilitiesKHR = nullptr; +#else + void * fp_vkGetDisplayPlaneCapabilitiesKHR{}; +#endif +#if (defined(VK_KHR_display)) + PFN_vkCreateDisplayPlaneSurfaceKHR fp_vkCreateDisplayPlaneSurfaceKHR = nullptr; +#else + void * fp_vkCreateDisplayPlaneSurfaceKHR{}; +#endif +#if (defined(VK_KHR_surface)) + PFN_vkDestroySurfaceKHR fp_vkDestroySurfaceKHR = nullptr; +#else + void * fp_vkDestroySurfaceKHR{}; +#endif +#if (defined(VK_KHR_surface)) + PFN_vkGetPhysicalDeviceSurfaceSupportKHR fp_vkGetPhysicalDeviceSurfaceSupportKHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceSurfaceSupportKHR{}; +#endif +#if (defined(VK_KHR_surface)) + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR{}; +#endif +#if (defined(VK_KHR_surface)) + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR fp_vkGetPhysicalDeviceSurfaceFormatsKHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceSurfaceFormatsKHR{}; +#endif +#if (defined(VK_KHR_surface)) + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR fp_vkGetPhysicalDeviceSurfacePresentModesKHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceSurfacePresentModesKHR{}; +#endif +#if (defined(VK_NN_vi_surface)) + PFN_vkCreateViSurfaceNN fp_vkCreateViSurfaceNN = nullptr; +#else + void * fp_vkCreateViSurfaceNN{}; +#endif +#if (defined(VK_KHR_wayland_surface)) + PFN_vkCreateWaylandSurfaceKHR fp_vkCreateWaylandSurfaceKHR = nullptr; +#else + void * fp_vkCreateWaylandSurfaceKHR{}; +#endif +#if 
(defined(VK_KHR_wayland_surface)) + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR fp_vkGetPhysicalDeviceWaylandPresentationSupportKHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceWaylandPresentationSupportKHR{}; +#endif +#if (defined(VK_KHR_win32_surface)) + PFN_vkCreateWin32SurfaceKHR fp_vkCreateWin32SurfaceKHR = nullptr; +#else + void * fp_vkCreateWin32SurfaceKHR{}; +#endif +#if (defined(VK_KHR_win32_surface)) + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR fp_vkGetPhysicalDeviceWin32PresentationSupportKHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceWin32PresentationSupportKHR{}; +#endif +#if (defined(VK_KHR_xlib_surface)) + PFN_vkCreateXlibSurfaceKHR fp_vkCreateXlibSurfaceKHR = nullptr; +#else + void * fp_vkCreateXlibSurfaceKHR{}; +#endif +#if (defined(VK_KHR_xlib_surface)) + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR fp_vkGetPhysicalDeviceXlibPresentationSupportKHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceXlibPresentationSupportKHR{}; +#endif +#if (defined(VK_KHR_xcb_surface)) + PFN_vkCreateXcbSurfaceKHR fp_vkCreateXcbSurfaceKHR = nullptr; +#else + void * fp_vkCreateXcbSurfaceKHR{}; +#endif +#if (defined(VK_KHR_xcb_surface)) + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR fp_vkGetPhysicalDeviceXcbPresentationSupportKHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceXcbPresentationSupportKHR{}; +#endif +#if (defined(VK_EXT_directfb_surface)) + PFN_vkCreateDirectFBSurfaceEXT fp_vkCreateDirectFBSurfaceEXT = nullptr; +#else + void * fp_vkCreateDirectFBSurfaceEXT{}; +#endif +#if (defined(VK_EXT_directfb_surface)) + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT fp_vkGetPhysicalDeviceDirectFBPresentationSupportEXT = nullptr; +#else + void * fp_vkGetPhysicalDeviceDirectFBPresentationSupportEXT{}; +#endif +#if (defined(VK_FUCHSIA_imagepipe_surface)) + PFN_vkCreateImagePipeSurfaceFUCHSIA fp_vkCreateImagePipeSurfaceFUCHSIA = nullptr; +#else + void * fp_vkCreateImagePipeSurfaceFUCHSIA{}; +#endif +#if (defined(VK_GGP_stream_descriptor_surface)) + PFN_vkCreateStreamDescriptorSurfaceGGP fp_vkCreateStreamDescriptorSurfaceGGP = nullptr; +#else + void * fp_vkCreateStreamDescriptorSurfaceGGP{}; +#endif +#if (defined(VK_QNX_screen_surface)) + PFN_vkCreateScreenSurfaceQNX fp_vkCreateScreenSurfaceQNX = nullptr; +#else + void * fp_vkCreateScreenSurfaceQNX{}; +#endif +#if (defined(VK_QNX_screen_surface)) + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX fp_vkGetPhysicalDeviceScreenPresentationSupportQNX = nullptr; +#else + void * fp_vkGetPhysicalDeviceScreenPresentationSupportQNX{}; +#endif +#if (defined(VK_EXT_debug_report)) + PFN_vkCreateDebugReportCallbackEXT fp_vkCreateDebugReportCallbackEXT = nullptr; +#else + void * fp_vkCreateDebugReportCallbackEXT{}; +#endif +#if (defined(VK_EXT_debug_report)) + PFN_vkDestroyDebugReportCallbackEXT fp_vkDestroyDebugReportCallbackEXT = nullptr; +#else + void * fp_vkDestroyDebugReportCallbackEXT{}; +#endif +#if (defined(VK_EXT_debug_report)) + PFN_vkDebugReportMessageEXT fp_vkDebugReportMessageEXT = nullptr; +#else + void * fp_vkDebugReportMessageEXT{}; +#endif +#if (defined(VK_NV_external_memory_capabilities)) + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV fp_vkGetPhysicalDeviceExternalImageFormatPropertiesNV = nullptr; +#else + void * fp_vkGetPhysicalDeviceExternalImageFormatPropertiesNV{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkGetPhysicalDeviceFeatures2 fp_vkGetPhysicalDeviceFeatures2 = nullptr; +#else + void * fp_vkGetPhysicalDeviceFeatures2{}; +#endif +#if (defined(VK_VERSION_1_1)) + 
PFN_vkGetPhysicalDeviceProperties2 fp_vkGetPhysicalDeviceProperties2 = nullptr; +#else + void * fp_vkGetPhysicalDeviceProperties2{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkGetPhysicalDeviceFormatProperties2 fp_vkGetPhysicalDeviceFormatProperties2 = nullptr; +#else + void * fp_vkGetPhysicalDeviceFormatProperties2{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkGetPhysicalDeviceImageFormatProperties2 fp_vkGetPhysicalDeviceImageFormatProperties2 = nullptr; +#else + void * fp_vkGetPhysicalDeviceImageFormatProperties2{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkGetPhysicalDeviceQueueFamilyProperties2 fp_vkGetPhysicalDeviceQueueFamilyProperties2 = nullptr; +#else + void * fp_vkGetPhysicalDeviceQueueFamilyProperties2{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkGetPhysicalDeviceMemoryProperties2 fp_vkGetPhysicalDeviceMemoryProperties2 = nullptr; +#else + void * fp_vkGetPhysicalDeviceMemoryProperties2{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 fp_vkGetPhysicalDeviceSparseImageFormatProperties2 = nullptr; +#else + void * fp_vkGetPhysicalDeviceSparseImageFormatProperties2{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkGetPhysicalDeviceExternalBufferProperties fp_vkGetPhysicalDeviceExternalBufferProperties = nullptr; +#else + void * fp_vkGetPhysicalDeviceExternalBufferProperties{}; +#endif +#if (defined(VK_NV_external_memory_sci_buf)) + PFN_vkGetPhysicalDeviceExternalMemorySciBufPropertiesNV fp_vkGetPhysicalDeviceExternalMemorySciBufPropertiesNV = nullptr; +#else + void * fp_vkGetPhysicalDeviceExternalMemorySciBufPropertiesNV{}; +#endif +#if (defined(VK_NV_external_memory_sci_buf)) + PFN_vkGetPhysicalDeviceSciBufAttributesNV fp_vkGetPhysicalDeviceSciBufAttributesNV = nullptr; +#else + void * fp_vkGetPhysicalDeviceSciBufAttributesNV{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties fp_vkGetPhysicalDeviceExternalSemaphoreProperties = nullptr; +#else + void * fp_vkGetPhysicalDeviceExternalSemaphoreProperties{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkGetPhysicalDeviceExternalFenceProperties fp_vkGetPhysicalDeviceExternalFenceProperties = nullptr; +#else + void * fp_vkGetPhysicalDeviceExternalFenceProperties{}; +#endif +#if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) + PFN_vkGetPhysicalDeviceSciSyncAttributesNV fp_vkGetPhysicalDeviceSciSyncAttributesNV = nullptr; +#else + void * fp_vkGetPhysicalDeviceSciSyncAttributesNV{}; +#endif +#if (defined(VK_EXT_direct_mode_display)) + PFN_vkReleaseDisplayEXT fp_vkReleaseDisplayEXT = nullptr; +#else + void * fp_vkReleaseDisplayEXT{}; +#endif +#if (defined(VK_EXT_acquire_xlib_display)) + PFN_vkAcquireXlibDisplayEXT fp_vkAcquireXlibDisplayEXT = nullptr; +#else + void * fp_vkAcquireXlibDisplayEXT{}; +#endif +#if (defined(VK_EXT_acquire_xlib_display)) + PFN_vkGetRandROutputDisplayEXT fp_vkGetRandROutputDisplayEXT = nullptr; +#else + void * fp_vkGetRandROutputDisplayEXT{}; +#endif +#if (defined(VK_NV_acquire_winrt_display)) + PFN_vkAcquireWinrtDisplayNV fp_vkAcquireWinrtDisplayNV = nullptr; +#else + void * fp_vkAcquireWinrtDisplayNV{}; +#endif +#if (defined(VK_NV_acquire_winrt_display)) + PFN_vkGetWinrtDisplayNV fp_vkGetWinrtDisplayNV = nullptr; +#else + void * fp_vkGetWinrtDisplayNV{}; +#endif +#if (defined(VK_EXT_display_surface_counter)) + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT fp_vkGetPhysicalDeviceSurfaceCapabilities2EXT = nullptr; +#else + void * fp_vkGetPhysicalDeviceSurfaceCapabilities2EXT{}; 
+#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkEnumeratePhysicalDeviceGroups fp_vkEnumeratePhysicalDeviceGroups = nullptr; +#else + void * fp_vkEnumeratePhysicalDeviceGroups{}; +#endif +#if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group)) + PFN_vkGetPhysicalDevicePresentRectanglesKHR fp_vkGetPhysicalDevicePresentRectanglesKHR = nullptr; +#else + void * fp_vkGetPhysicalDevicePresentRectanglesKHR{}; +#endif +#if (defined(VK_MVK_ios_surface)) + PFN_vkCreateIOSSurfaceMVK fp_vkCreateIOSSurfaceMVK = nullptr; +#else + void * fp_vkCreateIOSSurfaceMVK{}; +#endif +#if (defined(VK_MVK_macos_surface)) + PFN_vkCreateMacOSSurfaceMVK fp_vkCreateMacOSSurfaceMVK = nullptr; +#else + void * fp_vkCreateMacOSSurfaceMVK{}; +#endif +#if (defined(VK_EXT_metal_surface)) + PFN_vkCreateMetalSurfaceEXT fp_vkCreateMetalSurfaceEXT = nullptr; +#else + void * fp_vkCreateMetalSurfaceEXT{}; +#endif +#if (defined(VK_EXT_sample_locations)) + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT fp_vkGetPhysicalDeviceMultisamplePropertiesEXT = nullptr; +#else + void * fp_vkGetPhysicalDeviceMultisamplePropertiesEXT{}; +#endif +#if (defined(VK_KHR_get_surface_capabilities2)) + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR fp_vkGetPhysicalDeviceSurfaceCapabilities2KHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceSurfaceCapabilities2KHR{}; +#endif +#if (defined(VK_KHR_get_surface_capabilities2)) + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR fp_vkGetPhysicalDeviceSurfaceFormats2KHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceSurfaceFormats2KHR{}; +#endif +#if (defined(VK_KHR_get_display_properties2)) + PFN_vkGetPhysicalDeviceDisplayProperties2KHR fp_vkGetPhysicalDeviceDisplayProperties2KHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceDisplayProperties2KHR{}; +#endif +#if (defined(VK_KHR_get_display_properties2)) + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR fp_vkGetPhysicalDeviceDisplayPlaneProperties2KHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceDisplayPlaneProperties2KHR{}; +#endif +#if (defined(VK_KHR_get_display_properties2)) + PFN_vkGetDisplayModeProperties2KHR fp_vkGetDisplayModeProperties2KHR = nullptr; +#else + void * fp_vkGetDisplayModeProperties2KHR{}; +#endif +#if (defined(VK_KHR_get_display_properties2)) + PFN_vkGetDisplayPlaneCapabilities2KHR fp_vkGetDisplayPlaneCapabilities2KHR = nullptr; +#else + void * fp_vkGetDisplayPlaneCapabilities2KHR{}; +#endif +#if (defined(VK_KHR_calibrated_timestamps)) + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR fp_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR{}; +#endif +#if (defined(VK_EXT_debug_utils)) + PFN_vkCreateDebugUtilsMessengerEXT fp_vkCreateDebugUtilsMessengerEXT = nullptr; +#else + void * fp_vkCreateDebugUtilsMessengerEXT{}; +#endif +#if (defined(VK_EXT_debug_utils)) + PFN_vkDestroyDebugUtilsMessengerEXT fp_vkDestroyDebugUtilsMessengerEXT = nullptr; +#else + void * fp_vkDestroyDebugUtilsMessengerEXT{}; +#endif +#if (defined(VK_EXT_debug_utils)) + PFN_vkSubmitDebugUtilsMessageEXT fp_vkSubmitDebugUtilsMessageEXT = nullptr; +#else + void * fp_vkSubmitDebugUtilsMessageEXT{}; +#endif +#if (defined(VK_NV_cooperative_matrix)) + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV fp_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = nullptr; +#else + void * fp_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV{}; +#endif +#if (defined(VK_EXT_full_screen_exclusive)) + PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT fp_vkGetPhysicalDeviceSurfacePresentModes2EXT = nullptr; 
+#else + void * fp_vkGetPhysicalDeviceSurfacePresentModes2EXT{}; +#endif +#if (defined(VK_KHR_performance_query)) + PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR fp_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = nullptr; +#else + void * fp_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR{}; +#endif +#if (defined(VK_KHR_performance_query)) + PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR fp_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR{}; +#endif +#if (defined(VK_EXT_headless_surface)) + PFN_vkCreateHeadlessSurfaceEXT fp_vkCreateHeadlessSurfaceEXT = nullptr; +#else + void * fp_vkCreateHeadlessSurfaceEXT{}; +#endif +#if (defined(VK_NV_coverage_reduction_mode)) + PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV fp_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = nullptr; +#else + void * fp_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkGetPhysicalDeviceToolProperties fp_vkGetPhysicalDeviceToolProperties = nullptr; +#else + void * fp_vkGetPhysicalDeviceToolProperties{}; +#endif +#if (defined(VK_KHR_object_refresh)) + PFN_vkGetPhysicalDeviceRefreshableObjectTypesKHR fp_vkGetPhysicalDeviceRefreshableObjectTypesKHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceRefreshableObjectTypesKHR{}; +#endif +#if (defined(VK_KHR_fragment_shading_rate)) + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR fp_vkGetPhysicalDeviceFragmentShadingRatesKHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceFragmentShadingRatesKHR{}; +#endif +#if (defined(VK_KHR_video_queue)) + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR fp_vkGetPhysicalDeviceVideoCapabilitiesKHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceVideoCapabilitiesKHR{}; +#endif +#if (defined(VK_KHR_video_queue)) + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR fp_vkGetPhysicalDeviceVideoFormatPropertiesKHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceVideoFormatPropertiesKHR{}; +#endif +#if (defined(VK_KHR_video_encode_queue)) + PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR fp_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR{}; +#endif +#if (defined(VK_EXT_acquire_drm_display)) + PFN_vkAcquireDrmDisplayEXT fp_vkAcquireDrmDisplayEXT = nullptr; +#else + void * fp_vkAcquireDrmDisplayEXT{}; +#endif +#if (defined(VK_EXT_acquire_drm_display)) + PFN_vkGetDrmDisplayEXT fp_vkGetDrmDisplayEXT = nullptr; +#else + void * fp_vkGetDrmDisplayEXT{}; +#endif +#if (defined(VK_NV_optical_flow)) + PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV fp_vkGetPhysicalDeviceOpticalFlowImageFormatsNV = nullptr; +#else + void * fp_vkGetPhysicalDeviceOpticalFlowImageFormatsNV{}; +#endif +#if (defined(VK_KHR_cooperative_matrix)) + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR fp_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR{}; +#endif +#if (defined(VK_KHR_get_physical_device_properties2)) + PFN_vkGetPhysicalDeviceFeatures2KHR fp_vkGetPhysicalDeviceFeatures2KHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceFeatures2KHR{}; +#endif +#if (defined(VK_KHR_get_physical_device_properties2)) + PFN_vkGetPhysicalDeviceProperties2KHR fp_vkGetPhysicalDeviceProperties2KHR = nullptr; +#else + void * 
fp_vkGetPhysicalDeviceProperties2KHR{}; +#endif +#if (defined(VK_KHR_get_physical_device_properties2)) + PFN_vkGetPhysicalDeviceFormatProperties2KHR fp_vkGetPhysicalDeviceFormatProperties2KHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceFormatProperties2KHR{}; +#endif +#if (defined(VK_KHR_get_physical_device_properties2)) + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR fp_vkGetPhysicalDeviceImageFormatProperties2KHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceImageFormatProperties2KHR{}; +#endif +#if (defined(VK_KHR_get_physical_device_properties2)) + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR fp_vkGetPhysicalDeviceQueueFamilyProperties2KHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceQueueFamilyProperties2KHR{}; +#endif +#if (defined(VK_KHR_get_physical_device_properties2)) + PFN_vkGetPhysicalDeviceMemoryProperties2KHR fp_vkGetPhysicalDeviceMemoryProperties2KHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceMemoryProperties2KHR{}; +#endif +#if (defined(VK_KHR_get_physical_device_properties2)) + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR fp_vkGetPhysicalDeviceSparseImageFormatProperties2KHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceSparseImageFormatProperties2KHR{}; +#endif +#if (defined(VK_KHR_external_memory_capabilities)) + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR fp_vkGetPhysicalDeviceExternalBufferPropertiesKHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceExternalBufferPropertiesKHR{}; +#endif +#if (defined(VK_KHR_external_semaphore_capabilities)) + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR fp_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR{}; +#endif +#if (defined(VK_KHR_external_fence_capabilities)) + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR fp_vkGetPhysicalDeviceExternalFencePropertiesKHR = nullptr; +#else + void * fp_vkGetPhysicalDeviceExternalFencePropertiesKHR{}; +#endif +#if (defined(VK_KHR_device_group_creation)) + PFN_vkEnumeratePhysicalDeviceGroupsKHR fp_vkEnumeratePhysicalDeviceGroupsKHR = nullptr; +#else + void * fp_vkEnumeratePhysicalDeviceGroupsKHR{}; +#endif +#if (defined(VK_EXT_calibrated_timestamps)) + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT fp_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = nullptr; +#else + void * fp_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT{}; +#endif +#if (defined(VK_EXT_tooling_info)) + PFN_vkGetPhysicalDeviceToolPropertiesEXT fp_vkGetPhysicalDeviceToolPropertiesEXT = nullptr; +#else + void * fp_vkGetPhysicalDeviceToolPropertiesEXT{}; +#endif + bool is_populated() const { return populated; } + VkInstance instance = VK_NULL_HANDLE; + private: + bool populated = false; +}; + +struct DispatchTable { + DispatchTable() = default; + DispatchTable(VkDevice device, PFN_vkGetDeviceProcAddr procAddr) : device(device), populated(true) { + fp_vkGetDeviceQueue = reinterpret_cast<PFN_vkGetDeviceQueue>(procAddr(device, "vkGetDeviceQueue")); + fp_vkQueueSubmit = reinterpret_cast<PFN_vkQueueSubmit>(procAddr(device, "vkQueueSubmit")); + fp_vkQueueWaitIdle = reinterpret_cast<PFN_vkQueueWaitIdle>(procAddr(device, "vkQueueWaitIdle")); + fp_vkDeviceWaitIdle = reinterpret_cast<PFN_vkDeviceWaitIdle>(procAddr(device, "vkDeviceWaitIdle")); + fp_vkAllocateMemory = reinterpret_cast<PFN_vkAllocateMemory>(procAddr(device, "vkAllocateMemory")); + fp_vkFreeMemory = reinterpret_cast<PFN_vkFreeMemory>(procAddr(device, "vkFreeMemory")); + fp_vkMapMemory = reinterpret_cast<PFN_vkMapMemory>(procAddr(device, "vkMapMemory")); + fp_vkUnmapMemory = reinterpret_cast<PFN_vkUnmapMemory>(procAddr(device, "vkUnmapMemory")); + fp_vkFlushMappedMemoryRanges =
reinterpret_cast<PFN_vkFlushMappedMemoryRanges>(procAddr(device, "vkFlushMappedMemoryRanges")); + fp_vkInvalidateMappedMemoryRanges = reinterpret_cast<PFN_vkInvalidateMappedMemoryRanges>(procAddr(device, "vkInvalidateMappedMemoryRanges")); + fp_vkGetDeviceMemoryCommitment = reinterpret_cast<PFN_vkGetDeviceMemoryCommitment>(procAddr(device, "vkGetDeviceMemoryCommitment")); + fp_vkGetBufferMemoryRequirements = reinterpret_cast<PFN_vkGetBufferMemoryRequirements>(procAddr(device, "vkGetBufferMemoryRequirements")); + fp_vkBindBufferMemory = reinterpret_cast<PFN_vkBindBufferMemory>(procAddr(device, "vkBindBufferMemory")); + fp_vkGetImageMemoryRequirements = reinterpret_cast<PFN_vkGetImageMemoryRequirements>(procAddr(device, "vkGetImageMemoryRequirements")); + fp_vkBindImageMemory = reinterpret_cast<PFN_vkBindImageMemory>(procAddr(device, "vkBindImageMemory")); + fp_vkGetImageSparseMemoryRequirements = reinterpret_cast<PFN_vkGetImageSparseMemoryRequirements>(procAddr(device, "vkGetImageSparseMemoryRequirements")); + fp_vkQueueBindSparse = reinterpret_cast<PFN_vkQueueBindSparse>(procAddr(device, "vkQueueBindSparse")); + fp_vkCreateFence = reinterpret_cast<PFN_vkCreateFence>(procAddr(device, "vkCreateFence")); + fp_vkDestroyFence = reinterpret_cast<PFN_vkDestroyFence>(procAddr(device, "vkDestroyFence")); + fp_vkResetFences = reinterpret_cast<PFN_vkResetFences>(procAddr(device, "vkResetFences")); + fp_vkGetFenceStatus = reinterpret_cast<PFN_vkGetFenceStatus>(procAddr(device, "vkGetFenceStatus")); + fp_vkWaitForFences = reinterpret_cast<PFN_vkWaitForFences>(procAddr(device, "vkWaitForFences")); + fp_vkCreateSemaphore = reinterpret_cast<PFN_vkCreateSemaphore>(procAddr(device, "vkCreateSemaphore")); + fp_vkDestroySemaphore = reinterpret_cast<PFN_vkDestroySemaphore>(procAddr(device, "vkDestroySemaphore")); + fp_vkCreateEvent = reinterpret_cast<PFN_vkCreateEvent>(procAddr(device, "vkCreateEvent")); + fp_vkDestroyEvent = reinterpret_cast<PFN_vkDestroyEvent>(procAddr(device, "vkDestroyEvent")); + fp_vkGetEventStatus = reinterpret_cast<PFN_vkGetEventStatus>(procAddr(device, "vkGetEventStatus")); + fp_vkSetEvent = reinterpret_cast<PFN_vkSetEvent>(procAddr(device, "vkSetEvent")); + fp_vkResetEvent = reinterpret_cast<PFN_vkResetEvent>(procAddr(device, "vkResetEvent")); + fp_vkCreateQueryPool = reinterpret_cast<PFN_vkCreateQueryPool>(procAddr(device, "vkCreateQueryPool")); + fp_vkDestroyQueryPool = reinterpret_cast<PFN_vkDestroyQueryPool>(procAddr(device, "vkDestroyQueryPool")); + fp_vkGetQueryPoolResults = reinterpret_cast<PFN_vkGetQueryPoolResults>(procAddr(device, "vkGetQueryPoolResults")); +#if (defined(VK_VERSION_1_2)) + fp_vkResetQueryPool = reinterpret_cast<PFN_vkResetQueryPool>(procAddr(device, "vkResetQueryPool")); +#endif + fp_vkCreateBuffer = reinterpret_cast<PFN_vkCreateBuffer>(procAddr(device, "vkCreateBuffer")); + fp_vkDestroyBuffer = reinterpret_cast<PFN_vkDestroyBuffer>(procAddr(device, "vkDestroyBuffer")); + fp_vkCreateBufferView = reinterpret_cast<PFN_vkCreateBufferView>(procAddr(device, "vkCreateBufferView")); + fp_vkDestroyBufferView = reinterpret_cast<PFN_vkDestroyBufferView>(procAddr(device, "vkDestroyBufferView")); + fp_vkCreateImage = reinterpret_cast<PFN_vkCreateImage>(procAddr(device, "vkCreateImage")); + fp_vkDestroyImage = reinterpret_cast<PFN_vkDestroyImage>(procAddr(device, "vkDestroyImage")); + fp_vkGetImageSubresourceLayout = reinterpret_cast<PFN_vkGetImageSubresourceLayout>(procAddr(device, "vkGetImageSubresourceLayout")); + fp_vkCreateImageView = reinterpret_cast<PFN_vkCreateImageView>(procAddr(device, "vkCreateImageView")); + fp_vkDestroyImageView = reinterpret_cast<PFN_vkDestroyImageView>(procAddr(device, "vkDestroyImageView")); + fp_vkCreateShaderModule = reinterpret_cast<PFN_vkCreateShaderModule>(procAddr(device, "vkCreateShaderModule")); + fp_vkDestroyShaderModule = reinterpret_cast<PFN_vkDestroyShaderModule>(procAddr(device, "vkDestroyShaderModule")); + fp_vkCreatePipelineCache = reinterpret_cast<PFN_vkCreatePipelineCache>(procAddr(device, "vkCreatePipelineCache")); + fp_vkDestroyPipelineCache = reinterpret_cast<PFN_vkDestroyPipelineCache>(procAddr(device, "vkDestroyPipelineCache")); + fp_vkGetPipelineCacheData = reinterpret_cast<PFN_vkGetPipelineCacheData>(procAddr(device, "vkGetPipelineCacheData")); + fp_vkMergePipelineCaches = reinterpret_cast<PFN_vkMergePipelineCaches>(procAddr(device, "vkMergePipelineCaches")); +#if (defined(VK_KHR_pipeline_binary)) + fp_vkCreatePipelineBinariesKHR = reinterpret_cast<PFN_vkCreatePipelineBinariesKHR>(procAddr(device,
"vkCreatePipelineBinariesKHR")); +#endif +#if (defined(VK_KHR_pipeline_binary)) + fp_vkDestroyPipelineBinaryKHR = reinterpret_cast<PFN_vkDestroyPipelineBinaryKHR>(procAddr(device, "vkDestroyPipelineBinaryKHR")); +#endif +#if (defined(VK_KHR_pipeline_binary)) + fp_vkGetPipelineKeyKHR = reinterpret_cast<PFN_vkGetPipelineKeyKHR>(procAddr(device, "vkGetPipelineKeyKHR")); +#endif +#if (defined(VK_KHR_pipeline_binary)) + fp_vkGetPipelineBinaryDataKHR = reinterpret_cast<PFN_vkGetPipelineBinaryDataKHR>(procAddr(device, "vkGetPipelineBinaryDataKHR")); +#endif +#if (defined(VK_KHR_pipeline_binary)) + fp_vkReleaseCapturedPipelineDataKHR = reinterpret_cast<PFN_vkReleaseCapturedPipelineDataKHR>(procAddr(device, "vkReleaseCapturedPipelineDataKHR")); +#endif + fp_vkCreateGraphicsPipelines = reinterpret_cast<PFN_vkCreateGraphicsPipelines>(procAddr(device, "vkCreateGraphicsPipelines")); + fp_vkCreateComputePipelines = reinterpret_cast<PFN_vkCreateComputePipelines>(procAddr(device, "vkCreateComputePipelines")); +#if (defined(VK_HUAWEI_subpass_shading)) + fp_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = reinterpret_cast<PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI>(procAddr(device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI")); +#endif + fp_vkDestroyPipeline = reinterpret_cast<PFN_vkDestroyPipeline>(procAddr(device, "vkDestroyPipeline")); + fp_vkCreatePipelineLayout = reinterpret_cast<PFN_vkCreatePipelineLayout>(procAddr(device, "vkCreatePipelineLayout")); + fp_vkDestroyPipelineLayout = reinterpret_cast<PFN_vkDestroyPipelineLayout>(procAddr(device, "vkDestroyPipelineLayout")); + fp_vkCreateSampler = reinterpret_cast<PFN_vkCreateSampler>(procAddr(device, "vkCreateSampler")); + fp_vkDestroySampler = reinterpret_cast<PFN_vkDestroySampler>(procAddr(device, "vkDestroySampler")); + fp_vkCreateDescriptorSetLayout = reinterpret_cast<PFN_vkCreateDescriptorSetLayout>(procAddr(device, "vkCreateDescriptorSetLayout")); + fp_vkDestroyDescriptorSetLayout = reinterpret_cast<PFN_vkDestroyDescriptorSetLayout>(procAddr(device, "vkDestroyDescriptorSetLayout")); + fp_vkCreateDescriptorPool = reinterpret_cast<PFN_vkCreateDescriptorPool>(procAddr(device, "vkCreateDescriptorPool")); + fp_vkDestroyDescriptorPool = reinterpret_cast<PFN_vkDestroyDescriptorPool>(procAddr(device, "vkDestroyDescriptorPool")); + fp_vkResetDescriptorPool = reinterpret_cast<PFN_vkResetDescriptorPool>(procAddr(device, "vkResetDescriptorPool")); + fp_vkAllocateDescriptorSets = reinterpret_cast<PFN_vkAllocateDescriptorSets>(procAddr(device, "vkAllocateDescriptorSets")); + fp_vkFreeDescriptorSets = reinterpret_cast<PFN_vkFreeDescriptorSets>(procAddr(device, "vkFreeDescriptorSets")); + fp_vkUpdateDescriptorSets = reinterpret_cast<PFN_vkUpdateDescriptorSets>(procAddr(device, "vkUpdateDescriptorSets")); + fp_vkCreateFramebuffer = reinterpret_cast<PFN_vkCreateFramebuffer>(procAddr(device, "vkCreateFramebuffer")); + fp_vkDestroyFramebuffer = reinterpret_cast<PFN_vkDestroyFramebuffer>(procAddr(device, "vkDestroyFramebuffer")); + fp_vkCreateRenderPass = reinterpret_cast<PFN_vkCreateRenderPass>(procAddr(device, "vkCreateRenderPass")); + fp_vkDestroyRenderPass = reinterpret_cast<PFN_vkDestroyRenderPass>(procAddr(device, "vkDestroyRenderPass")); + fp_vkGetRenderAreaGranularity = reinterpret_cast<PFN_vkGetRenderAreaGranularity>(procAddr(device, "vkGetRenderAreaGranularity")); +#if (defined(VK_KHR_maintenance5)) + fp_vkGetRenderingAreaGranularityKHR = reinterpret_cast<PFN_vkGetRenderingAreaGranularityKHR>(procAddr(device, "vkGetRenderingAreaGranularityKHR")); +#endif + fp_vkCreateCommandPool = reinterpret_cast<PFN_vkCreateCommandPool>(procAddr(device, "vkCreateCommandPool")); + fp_vkDestroyCommandPool = reinterpret_cast<PFN_vkDestroyCommandPool>(procAddr(device, "vkDestroyCommandPool")); + fp_vkResetCommandPool = reinterpret_cast<PFN_vkResetCommandPool>(procAddr(device, "vkResetCommandPool")); + fp_vkAllocateCommandBuffers = reinterpret_cast<PFN_vkAllocateCommandBuffers>(procAddr(device, "vkAllocateCommandBuffers")); + fp_vkFreeCommandBuffers = reinterpret_cast<PFN_vkFreeCommandBuffers>(procAddr(device, "vkFreeCommandBuffers")); + fp_vkBeginCommandBuffer = reinterpret_cast<PFN_vkBeginCommandBuffer>(procAddr(device, "vkBeginCommandBuffer")); + fp_vkEndCommandBuffer = reinterpret_cast<PFN_vkEndCommandBuffer>(procAddr(device, "vkEndCommandBuffer")); + fp_vkResetCommandBuffer = reinterpret_cast<PFN_vkResetCommandBuffer>(procAddr(device, "vkResetCommandBuffer")); + fp_vkCmdBindPipeline =
reinterpret_cast<PFN_vkCmdBindPipeline>(procAddr(device, "vkCmdBindPipeline")); +#if (defined(VK_EXT_attachment_feedback_loop_dynamic_state)) + fp_vkCmdSetAttachmentFeedbackLoopEnableEXT = reinterpret_cast<PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT>(procAddr(device, "vkCmdSetAttachmentFeedbackLoopEnableEXT")); +#endif + fp_vkCmdSetViewport = reinterpret_cast<PFN_vkCmdSetViewport>(procAddr(device, "vkCmdSetViewport")); + fp_vkCmdSetScissor = reinterpret_cast<PFN_vkCmdSetScissor>(procAddr(device, "vkCmdSetScissor")); + fp_vkCmdSetLineWidth = reinterpret_cast<PFN_vkCmdSetLineWidth>(procAddr(device, "vkCmdSetLineWidth")); + fp_vkCmdSetDepthBias = reinterpret_cast<PFN_vkCmdSetDepthBias>(procAddr(device, "vkCmdSetDepthBias")); + fp_vkCmdSetBlendConstants = reinterpret_cast<PFN_vkCmdSetBlendConstants>(procAddr(device, "vkCmdSetBlendConstants")); + fp_vkCmdSetDepthBounds = reinterpret_cast<PFN_vkCmdSetDepthBounds>(procAddr(device, "vkCmdSetDepthBounds")); + fp_vkCmdSetStencilCompareMask = reinterpret_cast<PFN_vkCmdSetStencilCompareMask>(procAddr(device, "vkCmdSetStencilCompareMask")); + fp_vkCmdSetStencilWriteMask = reinterpret_cast<PFN_vkCmdSetStencilWriteMask>(procAddr(device, "vkCmdSetStencilWriteMask")); + fp_vkCmdSetStencilReference = reinterpret_cast<PFN_vkCmdSetStencilReference>(procAddr(device, "vkCmdSetStencilReference")); + fp_vkCmdBindDescriptorSets = reinterpret_cast<PFN_vkCmdBindDescriptorSets>(procAddr(device, "vkCmdBindDescriptorSets")); + fp_vkCmdBindIndexBuffer = reinterpret_cast<PFN_vkCmdBindIndexBuffer>(procAddr(device, "vkCmdBindIndexBuffer")); + fp_vkCmdBindVertexBuffers = reinterpret_cast<PFN_vkCmdBindVertexBuffers>(procAddr(device, "vkCmdBindVertexBuffers")); + fp_vkCmdDraw = reinterpret_cast<PFN_vkCmdDraw>(procAddr(device, "vkCmdDraw")); + fp_vkCmdDrawIndexed = reinterpret_cast<PFN_vkCmdDrawIndexed>(procAddr(device, "vkCmdDrawIndexed")); +#if (defined(VK_EXT_multi_draw)) + fp_vkCmdDrawMultiEXT = reinterpret_cast<PFN_vkCmdDrawMultiEXT>(procAddr(device, "vkCmdDrawMultiEXT")); +#endif +#if (defined(VK_EXT_multi_draw)) + fp_vkCmdDrawMultiIndexedEXT = reinterpret_cast<PFN_vkCmdDrawMultiIndexedEXT>(procAddr(device, "vkCmdDrawMultiIndexedEXT")); +#endif + fp_vkCmdDrawIndirect = reinterpret_cast<PFN_vkCmdDrawIndirect>(procAddr(device, "vkCmdDrawIndirect")); + fp_vkCmdDrawIndexedIndirect = reinterpret_cast<PFN_vkCmdDrawIndexedIndirect>(procAddr(device, "vkCmdDrawIndexedIndirect")); + fp_vkCmdDispatch = reinterpret_cast<PFN_vkCmdDispatch>(procAddr(device, "vkCmdDispatch")); + fp_vkCmdDispatchIndirect = reinterpret_cast<PFN_vkCmdDispatchIndirect>(procAddr(device, "vkCmdDispatchIndirect")); +#if (defined(VK_HUAWEI_subpass_shading)) + fp_vkCmdSubpassShadingHUAWEI = reinterpret_cast<PFN_vkCmdSubpassShadingHUAWEI>(procAddr(device, "vkCmdSubpassShadingHUAWEI")); +#endif +#if (defined(VK_HUAWEI_cluster_culling_shader)) + fp_vkCmdDrawClusterHUAWEI = reinterpret_cast<PFN_vkCmdDrawClusterHUAWEI>(procAddr(device, "vkCmdDrawClusterHUAWEI")); +#endif +#if (defined(VK_HUAWEI_cluster_culling_shader)) + fp_vkCmdDrawClusterIndirectHUAWEI = reinterpret_cast<PFN_vkCmdDrawClusterIndirectHUAWEI>(procAddr(device, "vkCmdDrawClusterIndirectHUAWEI")); +#endif +#if (defined(VK_NV_device_generated_commands_compute)) + fp_vkCmdUpdatePipelineIndirectBufferNV = reinterpret_cast<PFN_vkCmdUpdatePipelineIndirectBufferNV>(procAddr(device, "vkCmdUpdatePipelineIndirectBufferNV")); +#endif + fp_vkCmdCopyBuffer = reinterpret_cast<PFN_vkCmdCopyBuffer>(procAddr(device, "vkCmdCopyBuffer")); + fp_vkCmdCopyImage = reinterpret_cast<PFN_vkCmdCopyImage>(procAddr(device, "vkCmdCopyImage")); + fp_vkCmdBlitImage = reinterpret_cast<PFN_vkCmdBlitImage>(procAddr(device, "vkCmdBlitImage")); + fp_vkCmdCopyBufferToImage = reinterpret_cast<PFN_vkCmdCopyBufferToImage>(procAddr(device, "vkCmdCopyBufferToImage")); + fp_vkCmdCopyImageToBuffer = reinterpret_cast<PFN_vkCmdCopyImageToBuffer>(procAddr(device, "vkCmdCopyImageToBuffer")); +#if (defined(VK_NV_copy_memory_indirect)) + fp_vkCmdCopyMemoryIndirectNV = reinterpret_cast<PFN_vkCmdCopyMemoryIndirectNV>(procAddr(device, "vkCmdCopyMemoryIndirectNV")); +#endif +#if (defined(VK_NV_copy_memory_indirect)) + fp_vkCmdCopyMemoryToImageIndirectNV = reinterpret_cast<PFN_vkCmdCopyMemoryToImageIndirectNV>(procAddr(device, "vkCmdCopyMemoryToImageIndirectNV")); +#endif + fp_vkCmdUpdateBuffer = reinterpret_cast<PFN_vkCmdUpdateBuffer>(procAddr(device, "vkCmdUpdateBuffer")); + fp_vkCmdFillBuffer =
reinterpret_cast(procAddr(device, "vkCmdFillBuffer")); + fp_vkCmdClearColorImage = reinterpret_cast(procAddr(device, "vkCmdClearColorImage")); + fp_vkCmdClearDepthStencilImage = reinterpret_cast(procAddr(device, "vkCmdClearDepthStencilImage")); + fp_vkCmdClearAttachments = reinterpret_cast(procAddr(device, "vkCmdClearAttachments")); + fp_vkCmdResolveImage = reinterpret_cast(procAddr(device, "vkCmdResolveImage")); + fp_vkCmdSetEvent = reinterpret_cast(procAddr(device, "vkCmdSetEvent")); + fp_vkCmdResetEvent = reinterpret_cast(procAddr(device, "vkCmdResetEvent")); + fp_vkCmdWaitEvents = reinterpret_cast(procAddr(device, "vkCmdWaitEvents")); + fp_vkCmdPipelineBarrier = reinterpret_cast(procAddr(device, "vkCmdPipelineBarrier")); + fp_vkCmdBeginQuery = reinterpret_cast(procAddr(device, "vkCmdBeginQuery")); + fp_vkCmdEndQuery = reinterpret_cast(procAddr(device, "vkCmdEndQuery")); +#if (defined(VK_EXT_conditional_rendering)) + fp_vkCmdBeginConditionalRenderingEXT = reinterpret_cast(procAddr(device, "vkCmdBeginConditionalRenderingEXT")); +#endif +#if (defined(VK_EXT_conditional_rendering)) + fp_vkCmdEndConditionalRenderingEXT = reinterpret_cast(procAddr(device, "vkCmdEndConditionalRenderingEXT")); +#endif + fp_vkCmdResetQueryPool = reinterpret_cast(procAddr(device, "vkCmdResetQueryPool")); + fp_vkCmdWriteTimestamp = reinterpret_cast(procAddr(device, "vkCmdWriteTimestamp")); + fp_vkCmdCopyQueryPoolResults = reinterpret_cast(procAddr(device, "vkCmdCopyQueryPoolResults")); + fp_vkCmdPushConstants = reinterpret_cast(procAddr(device, "vkCmdPushConstants")); + fp_vkCmdBeginRenderPass = reinterpret_cast(procAddr(device, "vkCmdBeginRenderPass")); + fp_vkCmdNextSubpass = reinterpret_cast(procAddr(device, "vkCmdNextSubpass")); + fp_vkCmdEndRenderPass = reinterpret_cast(procAddr(device, "vkCmdEndRenderPass")); + fp_vkCmdExecuteCommands = reinterpret_cast(procAddr(device, "vkCmdExecuteCommands")); +#if (defined(VK_KHR_display_swapchain)) + fp_vkCreateSharedSwapchainsKHR = reinterpret_cast(procAddr(device, "vkCreateSharedSwapchainsKHR")); +#endif +#if (defined(VK_KHR_swapchain)) + fp_vkCreateSwapchainKHR = reinterpret_cast(procAddr(device, "vkCreateSwapchainKHR")); +#endif +#if (defined(VK_KHR_swapchain)) + fp_vkDestroySwapchainKHR = reinterpret_cast(procAddr(device, "vkDestroySwapchainKHR")); +#endif +#if (defined(VK_KHR_swapchain)) + fp_vkGetSwapchainImagesKHR = reinterpret_cast(procAddr(device, "vkGetSwapchainImagesKHR")); +#endif +#if (defined(VK_KHR_swapchain)) + fp_vkAcquireNextImageKHR = reinterpret_cast(procAddr(device, "vkAcquireNextImageKHR")); +#endif +#if (defined(VK_KHR_swapchain)) + fp_vkQueuePresentKHR = reinterpret_cast(procAddr(device, "vkQueuePresentKHR")); +#endif +#if (defined(VK_EXT_debug_marker)) + fp_vkDebugMarkerSetObjectNameEXT = reinterpret_cast(procAddr(device, "vkDebugMarkerSetObjectNameEXT")); +#endif +#if (defined(VK_EXT_debug_marker)) + fp_vkDebugMarkerSetObjectTagEXT = reinterpret_cast(procAddr(device, "vkDebugMarkerSetObjectTagEXT")); +#endif +#if (defined(VK_EXT_debug_marker)) + fp_vkCmdDebugMarkerBeginEXT = reinterpret_cast(procAddr(device, "vkCmdDebugMarkerBeginEXT")); +#endif +#if (defined(VK_EXT_debug_marker)) + fp_vkCmdDebugMarkerEndEXT = reinterpret_cast(procAddr(device, "vkCmdDebugMarkerEndEXT")); +#endif +#if (defined(VK_EXT_debug_marker)) + fp_vkCmdDebugMarkerInsertEXT = reinterpret_cast(procAddr(device, "vkCmdDebugMarkerInsertEXT")); +#endif +#if (defined(VK_NV_external_memory_win32)) + fp_vkGetMemoryWin32HandleNV = reinterpret_cast(procAddr(device, 
"vkGetMemoryWin32HandleNV")); +#endif +#if (defined(VK_NV_device_generated_commands)) + fp_vkCmdExecuteGeneratedCommandsNV = reinterpret_cast(procAddr(device, "vkCmdExecuteGeneratedCommandsNV")); +#endif +#if (defined(VK_NV_device_generated_commands)) + fp_vkCmdPreprocessGeneratedCommandsNV = reinterpret_cast(procAddr(device, "vkCmdPreprocessGeneratedCommandsNV")); +#endif +#if (defined(VK_NV_device_generated_commands)) + fp_vkCmdBindPipelineShaderGroupNV = reinterpret_cast(procAddr(device, "vkCmdBindPipelineShaderGroupNV")); +#endif +#if (defined(VK_NV_device_generated_commands)) + fp_vkGetGeneratedCommandsMemoryRequirementsNV = reinterpret_cast(procAddr(device, "vkGetGeneratedCommandsMemoryRequirementsNV")); +#endif +#if (defined(VK_NV_device_generated_commands)) + fp_vkCreateIndirectCommandsLayoutNV = reinterpret_cast(procAddr(device, "vkCreateIndirectCommandsLayoutNV")); +#endif +#if (defined(VK_NV_device_generated_commands)) + fp_vkDestroyIndirectCommandsLayoutNV = reinterpret_cast(procAddr(device, "vkDestroyIndirectCommandsLayoutNV")); +#endif +#if (defined(VK_EXT_device_generated_commands)) + fp_vkCmdExecuteGeneratedCommandsEXT = reinterpret_cast(procAddr(device, "vkCmdExecuteGeneratedCommandsEXT")); +#endif +#if (defined(VK_EXT_device_generated_commands)) + fp_vkCmdPreprocessGeneratedCommandsEXT = reinterpret_cast(procAddr(device, "vkCmdPreprocessGeneratedCommandsEXT")); +#endif +#if (defined(VK_EXT_device_generated_commands)) + fp_vkGetGeneratedCommandsMemoryRequirementsEXT = reinterpret_cast(procAddr(device, "vkGetGeneratedCommandsMemoryRequirementsEXT")); +#endif +#if (defined(VK_EXT_device_generated_commands)) + fp_vkCreateIndirectCommandsLayoutEXT = reinterpret_cast(procAddr(device, "vkCreateIndirectCommandsLayoutEXT")); +#endif +#if (defined(VK_EXT_device_generated_commands)) + fp_vkDestroyIndirectCommandsLayoutEXT = reinterpret_cast(procAddr(device, "vkDestroyIndirectCommandsLayoutEXT")); +#endif +#if (defined(VK_EXT_device_generated_commands)) + fp_vkCreateIndirectExecutionSetEXT = reinterpret_cast(procAddr(device, "vkCreateIndirectExecutionSetEXT")); +#endif +#if (defined(VK_EXT_device_generated_commands)) + fp_vkDestroyIndirectExecutionSetEXT = reinterpret_cast(procAddr(device, "vkDestroyIndirectExecutionSetEXT")); +#endif +#if (defined(VK_EXT_device_generated_commands)) + fp_vkUpdateIndirectExecutionSetPipelineEXT = reinterpret_cast(procAddr(device, "vkUpdateIndirectExecutionSetPipelineEXT")); +#endif +#if (defined(VK_EXT_device_generated_commands)) + fp_vkUpdateIndirectExecutionSetShaderEXT = reinterpret_cast(procAddr(device, "vkUpdateIndirectExecutionSetShaderEXT")); +#endif +#if (defined(VK_KHR_push_descriptor)) + fp_vkCmdPushDescriptorSetKHR = reinterpret_cast(procAddr(device, "vkCmdPushDescriptorSetKHR")); +#endif +#if (defined(VK_VERSION_1_1)) + fp_vkTrimCommandPool = reinterpret_cast(procAddr(device, "vkTrimCommandPool")); +#endif +#if (defined(VK_KHR_external_memory_win32)) + fp_vkGetMemoryWin32HandleKHR = reinterpret_cast(procAddr(device, "vkGetMemoryWin32HandleKHR")); +#endif +#if (defined(VK_KHR_external_memory_win32)) + fp_vkGetMemoryWin32HandlePropertiesKHR = reinterpret_cast(procAddr(device, "vkGetMemoryWin32HandlePropertiesKHR")); +#endif +#if (defined(VK_KHR_external_memory_fd)) + fp_vkGetMemoryFdKHR = reinterpret_cast(procAddr(device, "vkGetMemoryFdKHR")); +#endif +#if (defined(VK_KHR_external_memory_fd)) + fp_vkGetMemoryFdPropertiesKHR = reinterpret_cast(procAddr(device, "vkGetMemoryFdPropertiesKHR")); +#endif +#if (defined(VK_FUCHSIA_external_memory)) 
+ fp_vkGetMemoryZirconHandleFUCHSIA = reinterpret_cast(procAddr(device, "vkGetMemoryZirconHandleFUCHSIA")); +#endif +#if (defined(VK_FUCHSIA_external_memory)) + fp_vkGetMemoryZirconHandlePropertiesFUCHSIA = reinterpret_cast(procAddr(device, "vkGetMemoryZirconHandlePropertiesFUCHSIA")); +#endif +#if (defined(VK_NV_external_memory_rdma)) + fp_vkGetMemoryRemoteAddressNV = reinterpret_cast(procAddr(device, "vkGetMemoryRemoteAddressNV")); +#endif +#if (defined(VK_NV_external_memory_sci_buf)) + fp_vkGetMemorySciBufNV = reinterpret_cast(procAddr(device, "vkGetMemorySciBufNV")); +#endif +#if (defined(VK_KHR_external_semaphore_win32)) + fp_vkGetSemaphoreWin32HandleKHR = reinterpret_cast(procAddr(device, "vkGetSemaphoreWin32HandleKHR")); +#endif +#if (defined(VK_KHR_external_semaphore_win32)) + fp_vkImportSemaphoreWin32HandleKHR = reinterpret_cast(procAddr(device, "vkImportSemaphoreWin32HandleKHR")); +#endif +#if (defined(VK_KHR_external_semaphore_fd)) + fp_vkGetSemaphoreFdKHR = reinterpret_cast(procAddr(device, "vkGetSemaphoreFdKHR")); +#endif +#if (defined(VK_KHR_external_semaphore_fd)) + fp_vkImportSemaphoreFdKHR = reinterpret_cast(procAddr(device, "vkImportSemaphoreFdKHR")); +#endif +#if (defined(VK_FUCHSIA_external_semaphore)) + fp_vkGetSemaphoreZirconHandleFUCHSIA = reinterpret_cast(procAddr(device, "vkGetSemaphoreZirconHandleFUCHSIA")); +#endif +#if (defined(VK_FUCHSIA_external_semaphore)) + fp_vkImportSemaphoreZirconHandleFUCHSIA = reinterpret_cast(procAddr(device, "vkImportSemaphoreZirconHandleFUCHSIA")); +#endif +#if (defined(VK_KHR_external_fence_win32)) + fp_vkGetFenceWin32HandleKHR = reinterpret_cast(procAddr(device, "vkGetFenceWin32HandleKHR")); +#endif +#if (defined(VK_KHR_external_fence_win32)) + fp_vkImportFenceWin32HandleKHR = reinterpret_cast(procAddr(device, "vkImportFenceWin32HandleKHR")); +#endif +#if (defined(VK_KHR_external_fence_fd)) + fp_vkGetFenceFdKHR = reinterpret_cast(procAddr(device, "vkGetFenceFdKHR")); +#endif +#if (defined(VK_KHR_external_fence_fd)) + fp_vkImportFenceFdKHR = reinterpret_cast(procAddr(device, "vkImportFenceFdKHR")); +#endif +#if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) + fp_vkGetFenceSciSyncFenceNV = reinterpret_cast(procAddr(device, "vkGetFenceSciSyncFenceNV")); +#endif +#if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) + fp_vkGetFenceSciSyncObjNV = reinterpret_cast(procAddr(device, "vkGetFenceSciSyncObjNV")); +#endif +#if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) + fp_vkImportFenceSciSyncFenceNV = reinterpret_cast(procAddr(device, "vkImportFenceSciSyncFenceNV")); +#endif +#if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) + fp_vkImportFenceSciSyncObjNV = reinterpret_cast(procAddr(device, "vkImportFenceSciSyncObjNV")); +#endif +#if (defined(VK_NV_external_sci_sync)) + fp_vkGetSemaphoreSciSyncObjNV = reinterpret_cast(procAddr(device, "vkGetSemaphoreSciSyncObjNV")); +#endif +#if (defined(VK_NV_external_sci_sync)) + fp_vkImportSemaphoreSciSyncObjNV = reinterpret_cast(procAddr(device, "vkImportSemaphoreSciSyncObjNV")); +#endif +#if (defined(VK_NV_external_sci_sync2)) + fp_vkCreateSemaphoreSciSyncPoolNV = reinterpret_cast(procAddr(device, "vkCreateSemaphoreSciSyncPoolNV")); +#endif +#if (defined(VK_NV_external_sci_sync2)) + fp_vkDestroySemaphoreSciSyncPoolNV = reinterpret_cast(procAddr(device, "vkDestroySemaphoreSciSyncPoolNV")); +#endif +#if (defined(VK_EXT_display_control)) + fp_vkDisplayPowerControlEXT = 
reinterpret_cast<PFN_vkDisplayPowerControlEXT>(procAddr(device, "vkDisplayPowerControlEXT"));
+#endif
+#if (defined(VK_EXT_display_control))
+ fp_vkRegisterDeviceEventEXT = reinterpret_cast<PFN_vkRegisterDeviceEventEXT>(procAddr(device, "vkRegisterDeviceEventEXT"));
+#endif
+#if (defined(VK_EXT_display_control))
+ fp_vkRegisterDisplayEventEXT = reinterpret_cast<PFN_vkRegisterDisplayEventEXT>(procAddr(device, "vkRegisterDisplayEventEXT"));
+#endif
+#if (defined(VK_EXT_display_control))
+ fp_vkGetSwapchainCounterEXT = reinterpret_cast<PFN_vkGetSwapchainCounterEXT>(procAddr(device, "vkGetSwapchainCounterEXT"));
+#endif
+#if (defined(VK_VERSION_1_1))
+ fp_vkGetDeviceGroupPeerMemoryFeatures = reinterpret_cast<PFN_vkGetDeviceGroupPeerMemoryFeatures>(procAddr(device, "vkGetDeviceGroupPeerMemoryFeatures"));
+#endif
+#if (defined(VK_VERSION_1_1))
+ fp_vkBindBufferMemory2 = reinterpret_cast<PFN_vkBindBufferMemory2>(procAddr(device, "vkBindBufferMemory2"));
+#endif
+#if (defined(VK_VERSION_1_1))
+ fp_vkBindImageMemory2 = reinterpret_cast<PFN_vkBindImageMemory2>(procAddr(device, "vkBindImageMemory2"));
+#endif
+#if (defined(VK_VERSION_1_1))
+ fp_vkCmdSetDeviceMask = reinterpret_cast<PFN_vkCmdSetDeviceMask>(procAddr(device, "vkCmdSetDeviceMask"));
+#endif
+#if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group))
+ fp_vkGetDeviceGroupPresentCapabilitiesKHR = reinterpret_cast<PFN_vkGetDeviceGroupPresentCapabilitiesKHR>(procAddr(device, "vkGetDeviceGroupPresentCapabilitiesKHR"));
+#endif
+#if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group))
+ fp_vkGetDeviceGroupSurfacePresentModesKHR = reinterpret_cast<PFN_vkGetDeviceGroupSurfacePresentModesKHR>(procAddr(device, "vkGetDeviceGroupSurfacePresentModesKHR"));
+#endif
+#if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group))
+ fp_vkAcquireNextImage2KHR = reinterpret_cast<PFN_vkAcquireNextImage2KHR>(procAddr(device, "vkAcquireNextImage2KHR"));
+#endif
+#if (defined(VK_VERSION_1_1))
+ fp_vkCmdDispatchBase = reinterpret_cast<PFN_vkCmdDispatchBase>(procAddr(device, "vkCmdDispatchBase"));
+#endif
+#if (defined(VK_VERSION_1_1))
+ fp_vkCreateDescriptorUpdateTemplate = reinterpret_cast<PFN_vkCreateDescriptorUpdateTemplate>(procAddr(device, "vkCreateDescriptorUpdateTemplate"));
+#endif
+#if (defined(VK_VERSION_1_1))
+ fp_vkDestroyDescriptorUpdateTemplate = reinterpret_cast<PFN_vkDestroyDescriptorUpdateTemplate>(procAddr(device, "vkDestroyDescriptorUpdateTemplate"));
+#endif
+#if (defined(VK_VERSION_1_1))
+ fp_vkUpdateDescriptorSetWithTemplate = reinterpret_cast<PFN_vkUpdateDescriptorSetWithTemplate>(procAddr(device, "vkUpdateDescriptorSetWithTemplate"));
+#endif
+#if (defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_descriptor_update_template))
+ fp_vkCmdPushDescriptorSetWithTemplateKHR = reinterpret_cast<PFN_vkCmdPushDescriptorSetWithTemplateKHR>(procAddr(device, "vkCmdPushDescriptorSetWithTemplateKHR"));
+#endif
+#if (defined(VK_EXT_hdr_metadata))
+ fp_vkSetHdrMetadataEXT = reinterpret_cast<PFN_vkSetHdrMetadataEXT>(procAddr(device, "vkSetHdrMetadataEXT"));
+#endif
+#if (defined(VK_KHR_shared_presentable_image))
+ fp_vkGetSwapchainStatusKHR = reinterpret_cast<PFN_vkGetSwapchainStatusKHR>(procAddr(device, "vkGetSwapchainStatusKHR"));
+#endif
+#if (defined(VK_GOOGLE_display_timing))
+ fp_vkGetRefreshCycleDurationGOOGLE = reinterpret_cast<PFN_vkGetRefreshCycleDurationGOOGLE>(procAddr(device, "vkGetRefreshCycleDurationGOOGLE"));
+#endif
+#if (defined(VK_GOOGLE_display_timing))
+ fp_vkGetPastPresentationTimingGOOGLE = reinterpret_cast<PFN_vkGetPastPresentationTimingGOOGLE>(procAddr(device, "vkGetPastPresentationTimingGOOGLE"));
+#endif
+#if (defined(VK_NV_clip_space_w_scaling))
+ fp_vkCmdSetViewportWScalingNV = reinterpret_cast<PFN_vkCmdSetViewportWScalingNV>(procAddr(device, "vkCmdSetViewportWScalingNV"));
+#endif
+#if (defined(VK_EXT_discard_rectangles))
+ fp_vkCmdSetDiscardRectangleEXT = reinterpret_cast<PFN_vkCmdSetDiscardRectangleEXT>(procAddr(device, "vkCmdSetDiscardRectangleEXT"));
+#endif
+#if ((defined(VK_EXT_discard_rectangles))) && VK_HEADER_VERSION >= 241
+ fp_vkCmdSetDiscardRectangleEnableEXT = reinterpret_cast<PFN_vkCmdSetDiscardRectangleEnableEXT>(procAddr(device, "vkCmdSetDiscardRectangleEnableEXT"));
+#endif
+#if ((defined(VK_EXT_discard_rectangles))) && VK_HEADER_VERSION >= 241
+ fp_vkCmdSetDiscardRectangleModeEXT = reinterpret_cast<PFN_vkCmdSetDiscardRectangleModeEXT>(procAddr(device, "vkCmdSetDiscardRectangleModeEXT"));
+#endif
+#if (defined(VK_EXT_sample_locations))
+ fp_vkCmdSetSampleLocationsEXT = reinterpret_cast<PFN_vkCmdSetSampleLocationsEXT>(procAddr(device, "vkCmdSetSampleLocationsEXT"));
+#endif
+#if (defined(VK_VERSION_1_1))
+ fp_vkGetBufferMemoryRequirements2 = reinterpret_cast<PFN_vkGetBufferMemoryRequirements2>(procAddr(device, "vkGetBufferMemoryRequirements2"));
+#endif
+#if (defined(VK_VERSION_1_1))
+ fp_vkGetImageMemoryRequirements2 = reinterpret_cast<PFN_vkGetImageMemoryRequirements2>(procAddr(device, "vkGetImageMemoryRequirements2"));
+#endif
+#if (defined(VK_VERSION_1_1))
+ fp_vkGetImageSparseMemoryRequirements2 = reinterpret_cast<PFN_vkGetImageSparseMemoryRequirements2>(procAddr(device, "vkGetImageSparseMemoryRequirements2"));
+#endif
+#if (defined(VK_VERSION_1_3))
+ fp_vkGetDeviceBufferMemoryRequirements = reinterpret_cast<PFN_vkGetDeviceBufferMemoryRequirements>(procAddr(device, "vkGetDeviceBufferMemoryRequirements"));
+#endif
+#if (defined(VK_VERSION_1_3))
+ fp_vkGetDeviceImageMemoryRequirements = reinterpret_cast<PFN_vkGetDeviceImageMemoryRequirements>(procAddr(device, "vkGetDeviceImageMemoryRequirements"));
+#endif
+#if (defined(VK_VERSION_1_3))
+ fp_vkGetDeviceImageSparseMemoryRequirements = reinterpret_cast<PFN_vkGetDeviceImageSparseMemoryRequirements>(procAddr(device, "vkGetDeviceImageSparseMemoryRequirements"));
+#endif
+#if (defined(VK_VERSION_1_1))
+ fp_vkCreateSamplerYcbcrConversion = reinterpret_cast<PFN_vkCreateSamplerYcbcrConversion>(procAddr(device, "vkCreateSamplerYcbcrConversion"));
+#endif
+#if (defined(VK_VERSION_1_1))
+ fp_vkDestroySamplerYcbcrConversion = reinterpret_cast<PFN_vkDestroySamplerYcbcrConversion>(procAddr(device, "vkDestroySamplerYcbcrConversion"));
+#endif
+#if (defined(VK_VERSION_1_1))
+ fp_vkGetDeviceQueue2 = reinterpret_cast<PFN_vkGetDeviceQueue2>(procAddr(device, "vkGetDeviceQueue2"));
+#endif
+#if (defined(VK_EXT_validation_cache))
+ fp_vkCreateValidationCacheEXT = reinterpret_cast<PFN_vkCreateValidationCacheEXT>(procAddr(device, "vkCreateValidationCacheEXT"));
+#endif
+#if (defined(VK_EXT_validation_cache))
+ fp_vkDestroyValidationCacheEXT = reinterpret_cast<PFN_vkDestroyValidationCacheEXT>(procAddr(device, "vkDestroyValidationCacheEXT"));
+#endif
+#if (defined(VK_EXT_validation_cache))
+ fp_vkGetValidationCacheDataEXT = reinterpret_cast<PFN_vkGetValidationCacheDataEXT>(procAddr(device, "vkGetValidationCacheDataEXT"));
+#endif
+#if (defined(VK_EXT_validation_cache))
+ fp_vkMergeValidationCachesEXT = reinterpret_cast<PFN_vkMergeValidationCachesEXT>(procAddr(device, "vkMergeValidationCachesEXT"));
+#endif
+#if (defined(VK_VERSION_1_1))
+ fp_vkGetDescriptorSetLayoutSupport = reinterpret_cast<PFN_vkGetDescriptorSetLayoutSupport>(procAddr(device, "vkGetDescriptorSetLayoutSupport"));
+#endif
+#if (defined(VK_ANDROID_native_buffer))
+ fp_vkGetSwapchainGrallocUsageANDROID = reinterpret_cast<PFN_vkGetSwapchainGrallocUsageANDROID>(procAddr(device, "vkGetSwapchainGrallocUsageANDROID"));
+#endif
+#if (defined(VK_ANDROID_native_buffer))
+ fp_vkGetSwapchainGrallocUsage2ANDROID = reinterpret_cast<PFN_vkGetSwapchainGrallocUsage2ANDROID>(procAddr(device, "vkGetSwapchainGrallocUsage2ANDROID"));
+#endif
+#if (defined(VK_ANDROID_native_buffer))
+ fp_vkAcquireImageANDROID = reinterpret_cast<PFN_vkAcquireImageANDROID>(procAddr(device, "vkAcquireImageANDROID"));
+#endif
+#if (defined(VK_ANDROID_native_buffer))
+ fp_vkQueueSignalReleaseImageANDROID = reinterpret_cast<PFN_vkQueueSignalReleaseImageANDROID>(procAddr(device, "vkQueueSignalReleaseImageANDROID"));
+#endif
+#if (defined(VK_AMD_shader_info))
+ fp_vkGetShaderInfoAMD = reinterpret_cast<PFN_vkGetShaderInfoAMD>(procAddr(device, "vkGetShaderInfoAMD"));
+#endif
+#if (defined(VK_AMD_display_native_hdr))
+ fp_vkSetLocalDimmingAMD = reinterpret_cast<PFN_vkSetLocalDimmingAMD>(procAddr(device, "vkSetLocalDimmingAMD"));
+#endif
+#if (defined(VK_KHR_calibrated_timestamps))
+ fp_vkGetCalibratedTimestampsKHR = reinterpret_cast<PFN_vkGetCalibratedTimestampsKHR>(procAddr(device, "vkGetCalibratedTimestampsKHR"));
+#endif
+#if (defined(VK_EXT_debug_utils))
+ fp_vkSetDebugUtilsObjectNameEXT = reinterpret_cast<PFN_vkSetDebugUtilsObjectNameEXT>(procAddr(device, "vkSetDebugUtilsObjectNameEXT"));
+#endif +#if (defined(VK_EXT_debug_utils)) + fp_vkSetDebugUtilsObjectTagEXT = reinterpret_cast(procAddr(device, "vkSetDebugUtilsObjectTagEXT")); +#endif +#if (defined(VK_EXT_debug_utils)) + fp_vkQueueBeginDebugUtilsLabelEXT = reinterpret_cast(procAddr(device, "vkQueueBeginDebugUtilsLabelEXT")); +#endif +#if (defined(VK_EXT_debug_utils)) + fp_vkQueueEndDebugUtilsLabelEXT = reinterpret_cast(procAddr(device, "vkQueueEndDebugUtilsLabelEXT")); +#endif +#if (defined(VK_EXT_debug_utils)) + fp_vkQueueInsertDebugUtilsLabelEXT = reinterpret_cast(procAddr(device, "vkQueueInsertDebugUtilsLabelEXT")); +#endif +#if (defined(VK_EXT_debug_utils)) + fp_vkCmdBeginDebugUtilsLabelEXT = reinterpret_cast(procAddr(device, "vkCmdBeginDebugUtilsLabelEXT")); +#endif +#if (defined(VK_EXT_debug_utils)) + fp_vkCmdEndDebugUtilsLabelEXT = reinterpret_cast(procAddr(device, "vkCmdEndDebugUtilsLabelEXT")); +#endif +#if (defined(VK_EXT_debug_utils)) + fp_vkCmdInsertDebugUtilsLabelEXT = reinterpret_cast(procAddr(device, "vkCmdInsertDebugUtilsLabelEXT")); +#endif +#if (defined(VK_EXT_external_memory_host)) + fp_vkGetMemoryHostPointerPropertiesEXT = reinterpret_cast(procAddr(device, "vkGetMemoryHostPointerPropertiesEXT")); +#endif +#if (defined(VK_AMD_buffer_marker)) + fp_vkCmdWriteBufferMarkerAMD = reinterpret_cast(procAddr(device, "vkCmdWriteBufferMarkerAMD")); +#endif +#if (defined(VK_VERSION_1_2)) + fp_vkCreateRenderPass2 = reinterpret_cast(procAddr(device, "vkCreateRenderPass2")); +#endif +#if (defined(VK_VERSION_1_2)) + fp_vkCmdBeginRenderPass2 = reinterpret_cast(procAddr(device, "vkCmdBeginRenderPass2")); +#endif +#if (defined(VK_VERSION_1_2)) + fp_vkCmdNextSubpass2 = reinterpret_cast(procAddr(device, "vkCmdNextSubpass2")); +#endif +#if (defined(VK_VERSION_1_2)) + fp_vkCmdEndRenderPass2 = reinterpret_cast(procAddr(device, "vkCmdEndRenderPass2")); +#endif +#if (defined(VK_VERSION_1_2)) + fp_vkGetSemaphoreCounterValue = reinterpret_cast(procAddr(device, "vkGetSemaphoreCounterValue")); +#endif +#if (defined(VK_VERSION_1_2)) + fp_vkWaitSemaphores = reinterpret_cast(procAddr(device, "vkWaitSemaphores")); +#endif +#if (defined(VK_VERSION_1_2)) + fp_vkSignalSemaphore = reinterpret_cast(procAddr(device, "vkSignalSemaphore")); +#endif +#if (defined(VK_ANDROID_external_memory_android_hardware_buffer)) + fp_vkGetAndroidHardwareBufferPropertiesANDROID = reinterpret_cast(procAddr(device, "vkGetAndroidHardwareBufferPropertiesANDROID")); +#endif +#if (defined(VK_ANDROID_external_memory_android_hardware_buffer)) + fp_vkGetMemoryAndroidHardwareBufferANDROID = reinterpret_cast(procAddr(device, "vkGetMemoryAndroidHardwareBufferANDROID")); +#endif +#if (defined(VK_VERSION_1_2)) + fp_vkCmdDrawIndirectCount = reinterpret_cast(procAddr(device, "vkCmdDrawIndirectCount")); +#endif +#if (defined(VK_VERSION_1_2)) + fp_vkCmdDrawIndexedIndirectCount = reinterpret_cast(procAddr(device, "vkCmdDrawIndexedIndirectCount")); +#endif +#if (defined(VK_NV_device_diagnostic_checkpoints)) + fp_vkCmdSetCheckpointNV = reinterpret_cast(procAddr(device, "vkCmdSetCheckpointNV")); +#endif +#if (defined(VK_NV_device_diagnostic_checkpoints)) + fp_vkGetQueueCheckpointDataNV = reinterpret_cast(procAddr(device, "vkGetQueueCheckpointDataNV")); +#endif +#if (defined(VK_EXT_transform_feedback)) + fp_vkCmdBindTransformFeedbackBuffersEXT = reinterpret_cast(procAddr(device, "vkCmdBindTransformFeedbackBuffersEXT")); +#endif +#if (defined(VK_EXT_transform_feedback)) + fp_vkCmdBeginTransformFeedbackEXT = reinterpret_cast(procAddr(device, "vkCmdBeginTransformFeedbackEXT")); 
+#endif +#if (defined(VK_EXT_transform_feedback)) + fp_vkCmdEndTransformFeedbackEXT = reinterpret_cast(procAddr(device, "vkCmdEndTransformFeedbackEXT")); +#endif +#if (defined(VK_EXT_transform_feedback)) + fp_vkCmdBeginQueryIndexedEXT = reinterpret_cast(procAddr(device, "vkCmdBeginQueryIndexedEXT")); +#endif +#if (defined(VK_EXT_transform_feedback)) + fp_vkCmdEndQueryIndexedEXT = reinterpret_cast(procAddr(device, "vkCmdEndQueryIndexedEXT")); +#endif +#if (defined(VK_EXT_transform_feedback)) + fp_vkCmdDrawIndirectByteCountEXT = reinterpret_cast(procAddr(device, "vkCmdDrawIndirectByteCountEXT")); +#endif +#if (defined(VK_NV_scissor_exclusive)) + fp_vkCmdSetExclusiveScissorNV = reinterpret_cast(procAddr(device, "vkCmdSetExclusiveScissorNV")); +#endif +#if ((defined(VK_NV_scissor_exclusive))) && VK_HEADER_VERSION >= 241 + fp_vkCmdSetExclusiveScissorEnableNV = reinterpret_cast(procAddr(device, "vkCmdSetExclusiveScissorEnableNV")); +#endif +#if (defined(VK_NV_shading_rate_image)) + fp_vkCmdBindShadingRateImageNV = reinterpret_cast(procAddr(device, "vkCmdBindShadingRateImageNV")); +#endif +#if (defined(VK_NV_shading_rate_image)) + fp_vkCmdSetViewportShadingRatePaletteNV = reinterpret_cast(procAddr(device, "vkCmdSetViewportShadingRatePaletteNV")); +#endif +#if (defined(VK_NV_shading_rate_image)) + fp_vkCmdSetCoarseSampleOrderNV = reinterpret_cast(procAddr(device, "vkCmdSetCoarseSampleOrderNV")); +#endif +#if (defined(VK_NV_mesh_shader)) + fp_vkCmdDrawMeshTasksNV = reinterpret_cast(procAddr(device, "vkCmdDrawMeshTasksNV")); +#endif +#if (defined(VK_NV_mesh_shader)) + fp_vkCmdDrawMeshTasksIndirectNV = reinterpret_cast(procAddr(device, "vkCmdDrawMeshTasksIndirectNV")); +#endif +#if (defined(VK_NV_mesh_shader)) + fp_vkCmdDrawMeshTasksIndirectCountNV = reinterpret_cast(procAddr(device, "vkCmdDrawMeshTasksIndirectCountNV")); +#endif +#if (defined(VK_EXT_mesh_shader)) + fp_vkCmdDrawMeshTasksEXT = reinterpret_cast(procAddr(device, "vkCmdDrawMeshTasksEXT")); +#endif +#if (defined(VK_EXT_mesh_shader)) + fp_vkCmdDrawMeshTasksIndirectEXT = reinterpret_cast(procAddr(device, "vkCmdDrawMeshTasksIndirectEXT")); +#endif +#if (defined(VK_EXT_mesh_shader)) + fp_vkCmdDrawMeshTasksIndirectCountEXT = reinterpret_cast(procAddr(device, "vkCmdDrawMeshTasksIndirectCountEXT")); +#endif +#if (defined(VK_NV_ray_tracing)) + fp_vkCompileDeferredNV = reinterpret_cast(procAddr(device, "vkCompileDeferredNV")); +#endif +#if (defined(VK_NV_ray_tracing)) + fp_vkCreateAccelerationStructureNV = reinterpret_cast(procAddr(device, "vkCreateAccelerationStructureNV")); +#endif +#if (defined(VK_HUAWEI_invocation_mask)) + fp_vkCmdBindInvocationMaskHUAWEI = reinterpret_cast(procAddr(device, "vkCmdBindInvocationMaskHUAWEI")); +#endif +#if (defined(VK_KHR_acceleration_structure)) + fp_vkDestroyAccelerationStructureKHR = reinterpret_cast(procAddr(device, "vkDestroyAccelerationStructureKHR")); +#endif +#if (defined(VK_NV_ray_tracing)) + fp_vkDestroyAccelerationStructureNV = reinterpret_cast(procAddr(device, "vkDestroyAccelerationStructureNV")); +#endif +#if (defined(VK_NV_ray_tracing)) + fp_vkGetAccelerationStructureMemoryRequirementsNV = reinterpret_cast(procAddr(device, "vkGetAccelerationStructureMemoryRequirementsNV")); +#endif +#if (defined(VK_NV_ray_tracing)) + fp_vkBindAccelerationStructureMemoryNV = reinterpret_cast(procAddr(device, "vkBindAccelerationStructureMemoryNV")); +#endif +#if (defined(VK_NV_ray_tracing)) + fp_vkCmdCopyAccelerationStructureNV = reinterpret_cast(procAddr(device, "vkCmdCopyAccelerationStructureNV")); +#endif +#if 
(defined(VK_KHR_acceleration_structure)) + fp_vkCmdCopyAccelerationStructureKHR = reinterpret_cast(procAddr(device, "vkCmdCopyAccelerationStructureKHR")); +#endif +#if (defined(VK_KHR_acceleration_structure)) + fp_vkCopyAccelerationStructureKHR = reinterpret_cast(procAddr(device, "vkCopyAccelerationStructureKHR")); +#endif +#if (defined(VK_KHR_acceleration_structure)) + fp_vkCmdCopyAccelerationStructureToMemoryKHR = reinterpret_cast(procAddr(device, "vkCmdCopyAccelerationStructureToMemoryKHR")); +#endif +#if (defined(VK_KHR_acceleration_structure)) + fp_vkCopyAccelerationStructureToMemoryKHR = reinterpret_cast(procAddr(device, "vkCopyAccelerationStructureToMemoryKHR")); +#endif +#if (defined(VK_KHR_acceleration_structure)) + fp_vkCmdCopyMemoryToAccelerationStructureKHR = reinterpret_cast(procAddr(device, "vkCmdCopyMemoryToAccelerationStructureKHR")); +#endif +#if (defined(VK_KHR_acceleration_structure)) + fp_vkCopyMemoryToAccelerationStructureKHR = reinterpret_cast(procAddr(device, "vkCopyMemoryToAccelerationStructureKHR")); +#endif +#if (defined(VK_KHR_acceleration_structure)) + fp_vkCmdWriteAccelerationStructuresPropertiesKHR = reinterpret_cast(procAddr(device, "vkCmdWriteAccelerationStructuresPropertiesKHR")); +#endif +#if (defined(VK_NV_ray_tracing)) + fp_vkCmdWriteAccelerationStructuresPropertiesNV = reinterpret_cast(procAddr(device, "vkCmdWriteAccelerationStructuresPropertiesNV")); +#endif +#if (defined(VK_NV_ray_tracing)) + fp_vkCmdBuildAccelerationStructureNV = reinterpret_cast(procAddr(device, "vkCmdBuildAccelerationStructureNV")); +#endif +#if (defined(VK_KHR_acceleration_structure)) + fp_vkWriteAccelerationStructuresPropertiesKHR = reinterpret_cast(procAddr(device, "vkWriteAccelerationStructuresPropertiesKHR")); +#endif +#if (defined(VK_KHR_ray_tracing_pipeline)) + fp_vkCmdTraceRaysKHR = reinterpret_cast(procAddr(device, "vkCmdTraceRaysKHR")); +#endif +#if (defined(VK_NV_ray_tracing)) + fp_vkCmdTraceRaysNV = reinterpret_cast(procAddr(device, "vkCmdTraceRaysNV")); +#endif +#if (defined(VK_KHR_ray_tracing_pipeline)) + fp_vkGetRayTracingShaderGroupHandlesKHR = reinterpret_cast(procAddr(device, "vkGetRayTracingShaderGroupHandlesKHR")); +#endif +#if (defined(VK_KHR_ray_tracing_pipeline)) + fp_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = reinterpret_cast(procAddr(device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR")); +#endif +#if (defined(VK_NV_ray_tracing)) + fp_vkGetAccelerationStructureHandleNV = reinterpret_cast(procAddr(device, "vkGetAccelerationStructureHandleNV")); +#endif +#if (defined(VK_NV_ray_tracing)) + fp_vkCreateRayTracingPipelinesNV = reinterpret_cast(procAddr(device, "vkCreateRayTracingPipelinesNV")); +#endif +#if (defined(VK_KHR_ray_tracing_pipeline)) + fp_vkCreateRayTracingPipelinesKHR = reinterpret_cast(procAddr(device, "vkCreateRayTracingPipelinesKHR")); +#endif +#if (defined(VK_KHR_ray_tracing_pipeline)) + fp_vkCmdTraceRaysIndirectKHR = reinterpret_cast(procAddr(device, "vkCmdTraceRaysIndirectKHR")); +#endif +#if (defined(VK_KHR_ray_tracing_maintenance1)) + fp_vkCmdTraceRaysIndirect2KHR = reinterpret_cast(procAddr(device, "vkCmdTraceRaysIndirect2KHR")); +#endif +#if (defined(VK_KHR_acceleration_structure)) + fp_vkGetDeviceAccelerationStructureCompatibilityKHR = reinterpret_cast(procAddr(device, "vkGetDeviceAccelerationStructureCompatibilityKHR")); +#endif +#if (defined(VK_KHR_ray_tracing_pipeline)) + fp_vkGetRayTracingShaderGroupStackSizeKHR = reinterpret_cast(procAddr(device, "vkGetRayTracingShaderGroupStackSizeKHR")); +#endif +#if 
(defined(VK_KHR_ray_tracing_pipeline)) + fp_vkCmdSetRayTracingPipelineStackSizeKHR = reinterpret_cast(procAddr(device, "vkCmdSetRayTracingPipelineStackSizeKHR")); +#endif +#if (defined(VK_EXT_full_screen_exclusive)) + fp_vkGetDeviceGroupSurfacePresentModes2EXT = reinterpret_cast(procAddr(device, "vkGetDeviceGroupSurfacePresentModes2EXT")); +#endif +#if (defined(VK_EXT_full_screen_exclusive)) + fp_vkAcquireFullScreenExclusiveModeEXT = reinterpret_cast(procAddr(device, "vkAcquireFullScreenExclusiveModeEXT")); +#endif +#if (defined(VK_EXT_full_screen_exclusive)) + fp_vkReleaseFullScreenExclusiveModeEXT = reinterpret_cast(procAddr(device, "vkReleaseFullScreenExclusiveModeEXT")); +#endif +#if (defined(VK_KHR_performance_query)) + fp_vkAcquireProfilingLockKHR = reinterpret_cast(procAddr(device, "vkAcquireProfilingLockKHR")); +#endif +#if (defined(VK_KHR_performance_query)) + fp_vkReleaseProfilingLockKHR = reinterpret_cast(procAddr(device, "vkReleaseProfilingLockKHR")); +#endif +#if (defined(VK_EXT_image_drm_format_modifier)) + fp_vkGetImageDrmFormatModifierPropertiesEXT = reinterpret_cast(procAddr(device, "vkGetImageDrmFormatModifierPropertiesEXT")); +#endif +#if (defined(VK_VERSION_1_2)) + fp_vkGetBufferOpaqueCaptureAddress = reinterpret_cast(procAddr(device, "vkGetBufferOpaqueCaptureAddress")); +#endif +#if (defined(VK_VERSION_1_2)) + fp_vkGetBufferDeviceAddress = reinterpret_cast(procAddr(device, "vkGetBufferDeviceAddress")); +#endif +#if (defined(VK_INTEL_performance_query)) + fp_vkInitializePerformanceApiINTEL = reinterpret_cast(procAddr(device, "vkInitializePerformanceApiINTEL")); +#endif +#if (defined(VK_INTEL_performance_query)) + fp_vkUninitializePerformanceApiINTEL = reinterpret_cast(procAddr(device, "vkUninitializePerformanceApiINTEL")); +#endif +#if (defined(VK_INTEL_performance_query)) + fp_vkCmdSetPerformanceMarkerINTEL = reinterpret_cast(procAddr(device, "vkCmdSetPerformanceMarkerINTEL")); +#endif +#if (defined(VK_INTEL_performance_query)) + fp_vkCmdSetPerformanceStreamMarkerINTEL = reinterpret_cast(procAddr(device, "vkCmdSetPerformanceStreamMarkerINTEL")); +#endif +#if (defined(VK_INTEL_performance_query)) + fp_vkCmdSetPerformanceOverrideINTEL = reinterpret_cast(procAddr(device, "vkCmdSetPerformanceOverrideINTEL")); +#endif +#if (defined(VK_INTEL_performance_query)) + fp_vkAcquirePerformanceConfigurationINTEL = reinterpret_cast(procAddr(device, "vkAcquirePerformanceConfigurationINTEL")); +#endif +#if (defined(VK_INTEL_performance_query)) + fp_vkReleasePerformanceConfigurationINTEL = reinterpret_cast(procAddr(device, "vkReleasePerformanceConfigurationINTEL")); +#endif +#if (defined(VK_INTEL_performance_query)) + fp_vkQueueSetPerformanceConfigurationINTEL = reinterpret_cast(procAddr(device, "vkQueueSetPerformanceConfigurationINTEL")); +#endif +#if (defined(VK_INTEL_performance_query)) + fp_vkGetPerformanceParameterINTEL = reinterpret_cast(procAddr(device, "vkGetPerformanceParameterINTEL")); +#endif +#if (defined(VK_VERSION_1_2)) + fp_vkGetDeviceMemoryOpaqueCaptureAddress = reinterpret_cast(procAddr(device, "vkGetDeviceMemoryOpaqueCaptureAddress")); +#endif +#if (defined(VK_KHR_pipeline_executable_properties)) + fp_vkGetPipelineExecutablePropertiesKHR = reinterpret_cast(procAddr(device, "vkGetPipelineExecutablePropertiesKHR")); +#endif +#if (defined(VK_KHR_pipeline_executable_properties)) + fp_vkGetPipelineExecutableStatisticsKHR = reinterpret_cast(procAddr(device, "vkGetPipelineExecutableStatisticsKHR")); +#endif +#if (defined(VK_KHR_pipeline_executable_properties)) + 
fp_vkGetPipelineExecutableInternalRepresentationsKHR = reinterpret_cast(procAddr(device, "vkGetPipelineExecutableInternalRepresentationsKHR")); +#endif +#if (defined(VK_KHR_line_rasterization)) + fp_vkCmdSetLineStippleKHR = reinterpret_cast(procAddr(device, "vkCmdSetLineStippleKHR")); +#endif +#if (defined(VKSC_VERSION_1_0)) + fp_vkGetFaultData = reinterpret_cast(procAddr(device, "vkGetFaultData")); +#endif +#if (defined(VK_KHR_acceleration_structure)) + fp_vkCreateAccelerationStructureKHR = reinterpret_cast(procAddr(device, "vkCreateAccelerationStructureKHR")); +#endif +#if (defined(VK_KHR_acceleration_structure)) + fp_vkCmdBuildAccelerationStructuresKHR = reinterpret_cast(procAddr(device, "vkCmdBuildAccelerationStructuresKHR")); +#endif +#if (defined(VK_KHR_acceleration_structure)) + fp_vkCmdBuildAccelerationStructuresIndirectKHR = reinterpret_cast(procAddr(device, "vkCmdBuildAccelerationStructuresIndirectKHR")); +#endif +#if (defined(VK_KHR_acceleration_structure)) + fp_vkBuildAccelerationStructuresKHR = reinterpret_cast(procAddr(device, "vkBuildAccelerationStructuresKHR")); +#endif +#if (defined(VK_KHR_acceleration_structure)) + fp_vkGetAccelerationStructureDeviceAddressKHR = reinterpret_cast(procAddr(device, "vkGetAccelerationStructureDeviceAddressKHR")); +#endif +#if (defined(VK_KHR_deferred_host_operations)) + fp_vkCreateDeferredOperationKHR = reinterpret_cast(procAddr(device, "vkCreateDeferredOperationKHR")); +#endif +#if (defined(VK_KHR_deferred_host_operations)) + fp_vkDestroyDeferredOperationKHR = reinterpret_cast(procAddr(device, "vkDestroyDeferredOperationKHR")); +#endif +#if (defined(VK_KHR_deferred_host_operations)) + fp_vkGetDeferredOperationMaxConcurrencyKHR = reinterpret_cast(procAddr(device, "vkGetDeferredOperationMaxConcurrencyKHR")); +#endif +#if (defined(VK_KHR_deferred_host_operations)) + fp_vkGetDeferredOperationResultKHR = reinterpret_cast(procAddr(device, "vkGetDeferredOperationResultKHR")); +#endif +#if (defined(VK_KHR_deferred_host_operations)) + fp_vkDeferredOperationJoinKHR = reinterpret_cast(procAddr(device, "vkDeferredOperationJoinKHR")); +#endif +#if (defined(VK_NV_device_generated_commands_compute)) + fp_vkGetPipelineIndirectMemoryRequirementsNV = reinterpret_cast(procAddr(device, "vkGetPipelineIndirectMemoryRequirementsNV")); +#endif +#if (defined(VK_NV_device_generated_commands_compute)) + fp_vkGetPipelineIndirectDeviceAddressNV = reinterpret_cast(procAddr(device, "vkGetPipelineIndirectDeviceAddressNV")); +#endif +#if (defined(VK_AMD_anti_lag)) + fp_vkAntiLagUpdateAMD = reinterpret_cast(procAddr(device, "vkAntiLagUpdateAMD")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdSetCullMode = reinterpret_cast(procAddr(device, "vkCmdSetCullMode")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdSetFrontFace = reinterpret_cast(procAddr(device, "vkCmdSetFrontFace")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdSetPrimitiveTopology = reinterpret_cast(procAddr(device, "vkCmdSetPrimitiveTopology")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdSetViewportWithCount = reinterpret_cast(procAddr(device, "vkCmdSetViewportWithCount")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdSetScissorWithCount = reinterpret_cast(procAddr(device, "vkCmdSetScissorWithCount")); +#endif +#if (defined(VK_KHR_maintenance5)) + fp_vkCmdBindIndexBuffer2KHR = reinterpret_cast(procAddr(device, "vkCmdBindIndexBuffer2KHR")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdBindVertexBuffers2 = reinterpret_cast(procAddr(device, "vkCmdBindVertexBuffers2")); +#endif +#if 
(defined(VK_VERSION_1_3)) + fp_vkCmdSetDepthTestEnable = reinterpret_cast(procAddr(device, "vkCmdSetDepthTestEnable")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdSetDepthWriteEnable = reinterpret_cast(procAddr(device, "vkCmdSetDepthWriteEnable")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdSetDepthCompareOp = reinterpret_cast(procAddr(device, "vkCmdSetDepthCompareOp")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdSetDepthBoundsTestEnable = reinterpret_cast(procAddr(device, "vkCmdSetDepthBoundsTestEnable")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdSetStencilTestEnable = reinterpret_cast(procAddr(device, "vkCmdSetStencilTestEnable")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdSetStencilOp = reinterpret_cast(procAddr(device, "vkCmdSetStencilOp")); +#endif +#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetPatchControlPointsEXT = reinterpret_cast(procAddr(device, "vkCmdSetPatchControlPointsEXT")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdSetRasterizerDiscardEnable = reinterpret_cast(procAddr(device, "vkCmdSetRasterizerDiscardEnable")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdSetDepthBiasEnable = reinterpret_cast(procAddr(device, "vkCmdSetDepthBiasEnable")); +#endif +#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetLogicOpEXT = reinterpret_cast(procAddr(device, "vkCmdSetLogicOpEXT")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdSetPrimitiveRestartEnable = reinterpret_cast(procAddr(device, "vkCmdSetPrimitiveRestartEnable")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetTessellationDomainOriginEXT = reinterpret_cast(procAddr(device, "vkCmdSetTessellationDomainOriginEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetDepthClampEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetDepthClampEnableEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetPolygonModeEXT = reinterpret_cast(procAddr(device, "vkCmdSetPolygonModeEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetRasterizationSamplesEXT = reinterpret_cast(procAddr(device, "vkCmdSetRasterizationSamplesEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetSampleMaskEXT = reinterpret_cast(procAddr(device, "vkCmdSetSampleMaskEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetAlphaToCoverageEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetAlphaToCoverageEnableEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetAlphaToOneEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetAlphaToOneEnableEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetLogicOpEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetLogicOpEnableEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetColorBlendEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetColorBlendEnableEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetColorBlendEquationEXT = reinterpret_cast(procAddr(device, "vkCmdSetColorBlendEquationEXT")); +#endif +#if 
(defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetColorWriteMaskEXT = reinterpret_cast(procAddr(device, "vkCmdSetColorWriteMaskEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetRasterizationStreamEXT = reinterpret_cast(procAddr(device, "vkCmdSetRasterizationStreamEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetConservativeRasterizationModeEXT = reinterpret_cast(procAddr(device, "vkCmdSetConservativeRasterizationModeEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetExtraPrimitiveOverestimationSizeEXT = reinterpret_cast(procAddr(device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetDepthClipEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetDepthClipEnableEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetSampleLocationsEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetSampleLocationsEnableEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetColorBlendAdvancedEXT = reinterpret_cast(procAddr(device, "vkCmdSetColorBlendAdvancedEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetProvokingVertexModeEXT = reinterpret_cast(procAddr(device, "vkCmdSetProvokingVertexModeEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetLineRasterizationModeEXT = reinterpret_cast(procAddr(device, "vkCmdSetLineRasterizationModeEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetLineStippleEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetLineStippleEnableEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetDepthClipNegativeOneToOneEXT = reinterpret_cast(procAddr(device, "vkCmdSetDepthClipNegativeOneToOneEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetViewportWScalingEnableNV = reinterpret_cast(procAddr(device, "vkCmdSetViewportWScalingEnableNV")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetViewportSwizzleNV = reinterpret_cast(procAddr(device, "vkCmdSetViewportSwizzleNV")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetCoverageToColorEnableNV = reinterpret_cast(procAddr(device, "vkCmdSetCoverageToColorEnableNV")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetCoverageToColorLocationNV = reinterpret_cast(procAddr(device, "vkCmdSetCoverageToColorLocationNV")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetCoverageModulationModeNV = reinterpret_cast(procAddr(device, "vkCmdSetCoverageModulationModeNV")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetCoverageModulationTableEnableNV = reinterpret_cast(procAddr(device, "vkCmdSetCoverageModulationTableEnableNV")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetCoverageModulationTableNV = 
reinterpret_cast(procAddr(device, "vkCmdSetCoverageModulationTableNV")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetShadingRateImageEnableNV = reinterpret_cast(procAddr(device, "vkCmdSetShadingRateImageEnableNV")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetCoverageReductionModeNV = reinterpret_cast(procAddr(device, "vkCmdSetCoverageReductionModeNV")); +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetRepresentativeFragmentTestEnableNV = reinterpret_cast(procAddr(device, "vkCmdSetRepresentativeFragmentTestEnableNV")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCreatePrivateDataSlot = reinterpret_cast(procAddr(device, "vkCreatePrivateDataSlot")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkDestroyPrivateDataSlot = reinterpret_cast(procAddr(device, "vkDestroyPrivateDataSlot")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkSetPrivateData = reinterpret_cast(procAddr(device, "vkSetPrivateData")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkGetPrivateData = reinterpret_cast(procAddr(device, "vkGetPrivateData")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdCopyBuffer2 = reinterpret_cast(procAddr(device, "vkCmdCopyBuffer2")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdCopyImage2 = reinterpret_cast(procAddr(device, "vkCmdCopyImage2")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdBlitImage2 = reinterpret_cast(procAddr(device, "vkCmdBlitImage2")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdCopyBufferToImage2 = reinterpret_cast(procAddr(device, "vkCmdCopyBufferToImage2")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdCopyImageToBuffer2 = reinterpret_cast(procAddr(device, "vkCmdCopyImageToBuffer2")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdResolveImage2 = reinterpret_cast(procAddr(device, "vkCmdResolveImage2")); +#endif +#if (defined(VK_KHR_object_refresh)) + fp_vkCmdRefreshObjectsKHR = reinterpret_cast(procAddr(device, "vkCmdRefreshObjectsKHR")); +#endif +#if (defined(VK_KHR_fragment_shading_rate)) + fp_vkCmdSetFragmentShadingRateKHR = reinterpret_cast(procAddr(device, "vkCmdSetFragmentShadingRateKHR")); +#endif +#if (defined(VK_NV_fragment_shading_rate_enums)) + fp_vkCmdSetFragmentShadingRateEnumNV = reinterpret_cast(procAddr(device, "vkCmdSetFragmentShadingRateEnumNV")); +#endif +#if (defined(VK_KHR_acceleration_structure)) + fp_vkGetAccelerationStructureBuildSizesKHR = reinterpret_cast(procAddr(device, "vkGetAccelerationStructureBuildSizesKHR")); +#endif +#if (defined(VK_EXT_vertex_input_dynamic_state)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetVertexInputEXT = reinterpret_cast(procAddr(device, "vkCmdSetVertexInputEXT")); +#endif +#if (defined(VK_EXT_color_write_enable)) + fp_vkCmdSetColorWriteEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetColorWriteEnableEXT")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdSetEvent2 = reinterpret_cast(procAddr(device, "vkCmdSetEvent2")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdResetEvent2 = reinterpret_cast(procAddr(device, "vkCmdResetEvent2")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdWaitEvents2 = reinterpret_cast(procAddr(device, "vkCmdWaitEvents2")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdPipelineBarrier2 = reinterpret_cast(procAddr(device, "vkCmdPipelineBarrier2")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkQueueSubmit2 = reinterpret_cast(procAddr(device, "vkQueueSubmit2")); +#endif +#if 
(defined(VK_VERSION_1_3)) + fp_vkCmdWriteTimestamp2 = reinterpret_cast(procAddr(device, "vkCmdWriteTimestamp2")); +#endif +#if (defined(VK_KHR_synchronization2)) + fp_vkCmdWriteBufferMarker2AMD = reinterpret_cast(procAddr(device, "vkCmdWriteBufferMarker2AMD")); +#endif +#if (defined(VK_KHR_synchronization2)) + fp_vkGetQueueCheckpointData2NV = reinterpret_cast(procAddr(device, "vkGetQueueCheckpointData2NV")); +#endif +#if (defined(VK_EXT_host_image_copy)) + fp_vkCopyMemoryToImageEXT = reinterpret_cast(procAddr(device, "vkCopyMemoryToImageEXT")); +#endif +#if (defined(VK_EXT_host_image_copy)) + fp_vkCopyImageToMemoryEXT = reinterpret_cast(procAddr(device, "vkCopyImageToMemoryEXT")); +#endif +#if (defined(VK_EXT_host_image_copy)) + fp_vkCopyImageToImageEXT = reinterpret_cast(procAddr(device, "vkCopyImageToImageEXT")); +#endif +#if (defined(VK_EXT_host_image_copy)) + fp_vkTransitionImageLayoutEXT = reinterpret_cast(procAddr(device, "vkTransitionImageLayoutEXT")); +#endif +#if (defined(VKSC_VERSION_1_0)) + fp_vkGetCommandPoolMemoryConsumption = reinterpret_cast(procAddr(device, "vkGetCommandPoolMemoryConsumption")); +#endif +#if (defined(VK_KHR_video_queue)) + fp_vkCreateVideoSessionKHR = reinterpret_cast(procAddr(device, "vkCreateVideoSessionKHR")); +#endif +#if (defined(VK_KHR_video_queue)) + fp_vkDestroyVideoSessionKHR = reinterpret_cast(procAddr(device, "vkDestroyVideoSessionKHR")); +#endif +#if (defined(VK_KHR_video_queue)) + fp_vkCreateVideoSessionParametersKHR = reinterpret_cast(procAddr(device, "vkCreateVideoSessionParametersKHR")); +#endif +#if (defined(VK_KHR_video_queue)) + fp_vkUpdateVideoSessionParametersKHR = reinterpret_cast(procAddr(device, "vkUpdateVideoSessionParametersKHR")); +#endif +#if (defined(VK_KHR_video_encode_queue)) + fp_vkGetEncodedVideoSessionParametersKHR = reinterpret_cast(procAddr(device, "vkGetEncodedVideoSessionParametersKHR")); +#endif +#if (defined(VK_KHR_video_queue)) + fp_vkDestroyVideoSessionParametersKHR = reinterpret_cast(procAddr(device, "vkDestroyVideoSessionParametersKHR")); +#endif +#if (defined(VK_KHR_video_queue)) + fp_vkGetVideoSessionMemoryRequirementsKHR = reinterpret_cast(procAddr(device, "vkGetVideoSessionMemoryRequirementsKHR")); +#endif +#if (defined(VK_KHR_video_queue)) + fp_vkBindVideoSessionMemoryKHR = reinterpret_cast(procAddr(device, "vkBindVideoSessionMemoryKHR")); +#endif +#if (defined(VK_KHR_video_decode_queue)) + fp_vkCmdDecodeVideoKHR = reinterpret_cast(procAddr(device, "vkCmdDecodeVideoKHR")); +#endif +#if (defined(VK_KHR_video_queue)) + fp_vkCmdBeginVideoCodingKHR = reinterpret_cast(procAddr(device, "vkCmdBeginVideoCodingKHR")); +#endif +#if (defined(VK_KHR_video_queue)) + fp_vkCmdControlVideoCodingKHR = reinterpret_cast(procAddr(device, "vkCmdControlVideoCodingKHR")); +#endif +#if (defined(VK_KHR_video_queue)) + fp_vkCmdEndVideoCodingKHR = reinterpret_cast(procAddr(device, "vkCmdEndVideoCodingKHR")); +#endif +#if (defined(VK_KHR_video_encode_queue)) + fp_vkCmdEncodeVideoKHR = reinterpret_cast(procAddr(device, "vkCmdEncodeVideoKHR")); +#endif +#if (defined(VK_NV_memory_decompression)) + fp_vkCmdDecompressMemoryNV = reinterpret_cast(procAddr(device, "vkCmdDecompressMemoryNV")); +#endif +#if (defined(VK_NV_memory_decompression)) + fp_vkCmdDecompressMemoryIndirectCountNV = reinterpret_cast(procAddr(device, "vkCmdDecompressMemoryIndirectCountNV")); +#endif +#if (defined(VK_EXT_descriptor_buffer)) + fp_vkGetDescriptorSetLayoutSizeEXT = reinterpret_cast(procAddr(device, "vkGetDescriptorSetLayoutSizeEXT")); +#endif +#if 
(defined(VK_EXT_descriptor_buffer)) + fp_vkGetDescriptorSetLayoutBindingOffsetEXT = reinterpret_cast(procAddr(device, "vkGetDescriptorSetLayoutBindingOffsetEXT")); +#endif +#if (defined(VK_EXT_descriptor_buffer)) + fp_vkGetDescriptorEXT = reinterpret_cast(procAddr(device, "vkGetDescriptorEXT")); +#endif +#if (defined(VK_EXT_descriptor_buffer)) + fp_vkCmdBindDescriptorBuffersEXT = reinterpret_cast(procAddr(device, "vkCmdBindDescriptorBuffersEXT")); +#endif +#if (defined(VK_EXT_descriptor_buffer)) + fp_vkCmdSetDescriptorBufferOffsetsEXT = reinterpret_cast(procAddr(device, "vkCmdSetDescriptorBufferOffsetsEXT")); +#endif +#if (defined(VK_EXT_descriptor_buffer)) + fp_vkCmdBindDescriptorBufferEmbeddedSamplersEXT = reinterpret_cast(procAddr(device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT")); +#endif +#if (defined(VK_EXT_descriptor_buffer)) + fp_vkGetBufferOpaqueCaptureDescriptorDataEXT = reinterpret_cast(procAddr(device, "vkGetBufferOpaqueCaptureDescriptorDataEXT")); +#endif +#if (defined(VK_EXT_descriptor_buffer)) + fp_vkGetImageOpaqueCaptureDescriptorDataEXT = reinterpret_cast(procAddr(device, "vkGetImageOpaqueCaptureDescriptorDataEXT")); +#endif +#if (defined(VK_EXT_descriptor_buffer)) + fp_vkGetImageViewOpaqueCaptureDescriptorDataEXT = reinterpret_cast(procAddr(device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT")); +#endif +#if (defined(VK_EXT_descriptor_buffer)) + fp_vkGetSamplerOpaqueCaptureDescriptorDataEXT = reinterpret_cast(procAddr(device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT")); +#endif +#if (defined(VK_EXT_descriptor_buffer)) + fp_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = reinterpret_cast(procAddr(device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT")); +#endif +#if (defined(VK_EXT_pageable_device_local_memory)) + fp_vkSetDeviceMemoryPriorityEXT = reinterpret_cast(procAddr(device, "vkSetDeviceMemoryPriorityEXT")); +#endif +#if (defined(VK_KHR_present_wait)) + fp_vkWaitForPresentKHR = reinterpret_cast(procAddr(device, "vkWaitForPresentKHR")); +#endif +#if (defined(VK_FUCHSIA_buffer_collection)) + fp_vkCreateBufferCollectionFUCHSIA = reinterpret_cast(procAddr(device, "vkCreateBufferCollectionFUCHSIA")); +#endif +#if (defined(VK_FUCHSIA_buffer_collection)) + fp_vkSetBufferCollectionBufferConstraintsFUCHSIA = reinterpret_cast(procAddr(device, "vkSetBufferCollectionBufferConstraintsFUCHSIA")); +#endif +#if (defined(VK_FUCHSIA_buffer_collection)) + fp_vkSetBufferCollectionImageConstraintsFUCHSIA = reinterpret_cast(procAddr(device, "vkSetBufferCollectionImageConstraintsFUCHSIA")); +#endif +#if (defined(VK_FUCHSIA_buffer_collection)) + fp_vkDestroyBufferCollectionFUCHSIA = reinterpret_cast(procAddr(device, "vkDestroyBufferCollectionFUCHSIA")); +#endif +#if (defined(VK_FUCHSIA_buffer_collection)) + fp_vkGetBufferCollectionPropertiesFUCHSIA = reinterpret_cast(procAddr(device, "vkGetBufferCollectionPropertiesFUCHSIA")); +#endif +#if (defined(VK_NV_cuda_kernel_launch)) + fp_vkCreateCudaModuleNV = reinterpret_cast(procAddr(device, "vkCreateCudaModuleNV")); +#endif +#if (defined(VK_NV_cuda_kernel_launch)) + fp_vkGetCudaModuleCacheNV = reinterpret_cast(procAddr(device, "vkGetCudaModuleCacheNV")); +#endif +#if (defined(VK_NV_cuda_kernel_launch)) + fp_vkCreateCudaFunctionNV = reinterpret_cast(procAddr(device, "vkCreateCudaFunctionNV")); +#endif +#if (defined(VK_NV_cuda_kernel_launch)) + fp_vkDestroyCudaModuleNV = reinterpret_cast(procAddr(device, "vkDestroyCudaModuleNV")); +#endif +#if (defined(VK_NV_cuda_kernel_launch)) + fp_vkDestroyCudaFunctionNV = 
reinterpret_cast(procAddr(device, "vkDestroyCudaFunctionNV")); +#endif +#if (defined(VK_NV_cuda_kernel_launch)) + fp_vkCmdCudaLaunchKernelNV = reinterpret_cast(procAddr(device, "vkCmdCudaLaunchKernelNV")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdBeginRendering = reinterpret_cast(procAddr(device, "vkCmdBeginRendering")); +#endif +#if (defined(VK_VERSION_1_3)) + fp_vkCmdEndRendering = reinterpret_cast(procAddr(device, "vkCmdEndRendering")); +#endif +#if (defined(VK_VALVE_descriptor_set_host_mapping)) + fp_vkGetDescriptorSetLayoutHostMappingInfoVALVE = reinterpret_cast(procAddr(device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE")); +#endif +#if (defined(VK_VALVE_descriptor_set_host_mapping)) + fp_vkGetDescriptorSetHostMappingVALVE = reinterpret_cast(procAddr(device, "vkGetDescriptorSetHostMappingVALVE")); +#endif +#if (defined(VK_EXT_opacity_micromap)) + fp_vkCreateMicromapEXT = reinterpret_cast(procAddr(device, "vkCreateMicromapEXT")); +#endif +#if (defined(VK_EXT_opacity_micromap)) + fp_vkCmdBuildMicromapsEXT = reinterpret_cast(procAddr(device, "vkCmdBuildMicromapsEXT")); +#endif +#if (defined(VK_EXT_opacity_micromap)) + fp_vkBuildMicromapsEXT = reinterpret_cast(procAddr(device, "vkBuildMicromapsEXT")); +#endif +#if (defined(VK_EXT_opacity_micromap)) + fp_vkDestroyMicromapEXT = reinterpret_cast(procAddr(device, "vkDestroyMicromapEXT")); +#endif +#if (defined(VK_EXT_opacity_micromap)) + fp_vkCmdCopyMicromapEXT = reinterpret_cast(procAddr(device, "vkCmdCopyMicromapEXT")); +#endif +#if (defined(VK_EXT_opacity_micromap)) + fp_vkCopyMicromapEXT = reinterpret_cast(procAddr(device, "vkCopyMicromapEXT")); +#endif +#if (defined(VK_EXT_opacity_micromap)) + fp_vkCmdCopyMicromapToMemoryEXT = reinterpret_cast(procAddr(device, "vkCmdCopyMicromapToMemoryEXT")); +#endif +#if (defined(VK_EXT_opacity_micromap)) + fp_vkCopyMicromapToMemoryEXT = reinterpret_cast(procAddr(device, "vkCopyMicromapToMemoryEXT")); +#endif +#if (defined(VK_EXT_opacity_micromap)) + fp_vkCmdCopyMemoryToMicromapEXT = reinterpret_cast(procAddr(device, "vkCmdCopyMemoryToMicromapEXT")); +#endif +#if (defined(VK_EXT_opacity_micromap)) + fp_vkCopyMemoryToMicromapEXT = reinterpret_cast(procAddr(device, "vkCopyMemoryToMicromapEXT")); +#endif +#if (defined(VK_EXT_opacity_micromap)) + fp_vkCmdWriteMicromapsPropertiesEXT = reinterpret_cast(procAddr(device, "vkCmdWriteMicromapsPropertiesEXT")); +#endif +#if (defined(VK_EXT_opacity_micromap)) + fp_vkWriteMicromapsPropertiesEXT = reinterpret_cast(procAddr(device, "vkWriteMicromapsPropertiesEXT")); +#endif +#if (defined(VK_EXT_opacity_micromap)) + fp_vkGetDeviceMicromapCompatibilityEXT = reinterpret_cast(procAddr(device, "vkGetDeviceMicromapCompatibilityEXT")); +#endif +#if (defined(VK_EXT_opacity_micromap)) + fp_vkGetMicromapBuildSizesEXT = reinterpret_cast(procAddr(device, "vkGetMicromapBuildSizesEXT")); +#endif +#if (defined(VK_EXT_shader_module_identifier)) + fp_vkGetShaderModuleIdentifierEXT = reinterpret_cast(procAddr(device, "vkGetShaderModuleIdentifierEXT")); +#endif +#if (defined(VK_EXT_shader_module_identifier)) + fp_vkGetShaderModuleCreateInfoIdentifierEXT = reinterpret_cast(procAddr(device, "vkGetShaderModuleCreateInfoIdentifierEXT")); +#endif +#if (defined(VK_KHR_maintenance5)) + fp_vkGetImageSubresourceLayout2KHR = reinterpret_cast(procAddr(device, "vkGetImageSubresourceLayout2KHR")); +#endif +#if (defined(VK_EXT_pipeline_properties)) + fp_vkGetPipelinePropertiesEXT = reinterpret_cast(procAddr(device, "vkGetPipelinePropertiesEXT")); +#endif +#if 
(defined(VK_EXT_metal_objects)) + fp_vkExportMetalObjectsEXT = reinterpret_cast(procAddr(device, "vkExportMetalObjectsEXT")); +#endif +#if (defined(VK_QCOM_tile_properties)) + fp_vkGetFramebufferTilePropertiesQCOM = reinterpret_cast(procAddr(device, "vkGetFramebufferTilePropertiesQCOM")); +#endif +#if (defined(VK_QCOM_tile_properties)) + fp_vkGetDynamicRenderingTilePropertiesQCOM = reinterpret_cast(procAddr(device, "vkGetDynamicRenderingTilePropertiesQCOM")); +#endif +#if (defined(VK_NV_optical_flow)) + fp_vkCreateOpticalFlowSessionNV = reinterpret_cast(procAddr(device, "vkCreateOpticalFlowSessionNV")); +#endif +#if (defined(VK_NV_optical_flow)) + fp_vkDestroyOpticalFlowSessionNV = reinterpret_cast(procAddr(device, "vkDestroyOpticalFlowSessionNV")); +#endif +#if (defined(VK_NV_optical_flow)) + fp_vkBindOpticalFlowSessionImageNV = reinterpret_cast(procAddr(device, "vkBindOpticalFlowSessionImageNV")); +#endif +#if (defined(VK_NV_optical_flow)) + fp_vkCmdOpticalFlowExecuteNV = reinterpret_cast(procAddr(device, "vkCmdOpticalFlowExecuteNV")); +#endif +#if (defined(VK_EXT_device_fault)) + fp_vkGetDeviceFaultInfoEXT = reinterpret_cast(procAddr(device, "vkGetDeviceFaultInfoEXT")); +#endif +#if (defined(VK_EXT_depth_bias_control)) + fp_vkCmdSetDepthBias2EXT = reinterpret_cast(procAddr(device, "vkCmdSetDepthBias2EXT")); +#endif +#if (defined(VK_EXT_swapchain_maintenance1)) + fp_vkReleaseSwapchainImagesEXT = reinterpret_cast(procAddr(device, "vkReleaseSwapchainImagesEXT")); +#endif +#if (defined(VK_KHR_maintenance5)) + fp_vkGetDeviceImageSubresourceLayoutKHR = reinterpret_cast(procAddr(device, "vkGetDeviceImageSubresourceLayoutKHR")); +#endif +#if (defined(VK_KHR_map_memory2)) + fp_vkMapMemory2KHR = reinterpret_cast(procAddr(device, "vkMapMemory2KHR")); +#endif +#if (defined(VK_KHR_map_memory2)) + fp_vkUnmapMemory2KHR = reinterpret_cast(procAddr(device, "vkUnmapMemory2KHR")); +#endif +#if (defined(VK_EXT_shader_object)) + fp_vkCreateShadersEXT = reinterpret_cast(procAddr(device, "vkCreateShadersEXT")); +#endif +#if (defined(VK_EXT_shader_object)) + fp_vkDestroyShaderEXT = reinterpret_cast(procAddr(device, "vkDestroyShaderEXT")); +#endif +#if (defined(VK_EXT_shader_object)) + fp_vkGetShaderBinaryDataEXT = reinterpret_cast(procAddr(device, "vkGetShaderBinaryDataEXT")); +#endif +#if (defined(VK_EXT_shader_object)) + fp_vkCmdBindShadersEXT = reinterpret_cast(procAddr(device, "vkCmdBindShadersEXT")); +#endif +#if (defined(VK_QNX_external_memory_screen_buffer)) + fp_vkGetScreenBufferPropertiesQNX = reinterpret_cast(procAddr(device, "vkGetScreenBufferPropertiesQNX")); +#endif +#if (defined(VK_AMDX_shader_enqueue)) + fp_vkGetExecutionGraphPipelineScratchSizeAMDX = reinterpret_cast(procAddr(device, "vkGetExecutionGraphPipelineScratchSizeAMDX")); +#endif +#if (defined(VK_AMDX_shader_enqueue)) + fp_vkGetExecutionGraphPipelineNodeIndexAMDX = reinterpret_cast(procAddr(device, "vkGetExecutionGraphPipelineNodeIndexAMDX")); +#endif +#if (defined(VK_AMDX_shader_enqueue)) + fp_vkCreateExecutionGraphPipelinesAMDX = reinterpret_cast(procAddr(device, "vkCreateExecutionGraphPipelinesAMDX")); +#endif +#if (defined(VK_AMDX_shader_enqueue)) + fp_vkCmdInitializeGraphScratchMemoryAMDX = reinterpret_cast(procAddr(device, "vkCmdInitializeGraphScratchMemoryAMDX")); +#endif +#if (defined(VK_AMDX_shader_enqueue)) + fp_vkCmdDispatchGraphAMDX = reinterpret_cast(procAddr(device, "vkCmdDispatchGraphAMDX")); +#endif +#if (defined(VK_AMDX_shader_enqueue)) + fp_vkCmdDispatchGraphIndirectAMDX = reinterpret_cast(procAddr(device, 
"vkCmdDispatchGraphIndirectAMDX")); +#endif +#if (defined(VK_AMDX_shader_enqueue)) + fp_vkCmdDispatchGraphIndirectCountAMDX = reinterpret_cast(procAddr(device, "vkCmdDispatchGraphIndirectCountAMDX")); +#endif +#if (defined(VK_KHR_maintenance6)) + fp_vkCmdBindDescriptorSets2KHR = reinterpret_cast(procAddr(device, "vkCmdBindDescriptorSets2KHR")); +#endif +#if (defined(VK_KHR_maintenance6)) + fp_vkCmdPushConstants2KHR = reinterpret_cast(procAddr(device, "vkCmdPushConstants2KHR")); +#endif +#if (defined(VK_KHR_maintenance6)) + fp_vkCmdPushDescriptorSet2KHR = reinterpret_cast(procAddr(device, "vkCmdPushDescriptorSet2KHR")); +#endif +#if (defined(VK_KHR_maintenance6)) + fp_vkCmdPushDescriptorSetWithTemplate2KHR = reinterpret_cast(procAddr(device, "vkCmdPushDescriptorSetWithTemplate2KHR")); +#endif +#if (defined(VK_KHR_maintenance6)) + fp_vkCmdSetDescriptorBufferOffsets2EXT = reinterpret_cast(procAddr(device, "vkCmdSetDescriptorBufferOffsets2EXT")); +#endif +#if (defined(VK_KHR_maintenance6)) + fp_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = reinterpret_cast(procAddr(device, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT")); +#endif +#if (defined(VK_NV_low_latency2)) + fp_vkSetLatencySleepModeNV = reinterpret_cast(procAddr(device, "vkSetLatencySleepModeNV")); +#endif +#if (defined(VK_NV_low_latency2)) + fp_vkLatencySleepNV = reinterpret_cast(procAddr(device, "vkLatencySleepNV")); +#endif +#if (defined(VK_NV_low_latency2)) + fp_vkSetLatencyMarkerNV = reinterpret_cast(procAddr(device, "vkSetLatencyMarkerNV")); +#endif +#if ((defined(VK_NV_low_latency2))) && VK_HEADER_VERSION >= 271 + fp_vkGetLatencyTimingsNV = reinterpret_cast(procAddr(device, "vkGetLatencyTimingsNV")); +#endif +#if (defined(VK_NV_low_latency2)) + fp_vkQueueNotifyOutOfBandNV = reinterpret_cast(procAddr(device, "vkQueueNotifyOutOfBandNV")); +#endif +#if (defined(VK_KHR_dynamic_rendering_local_read)) + fp_vkCmdSetRenderingAttachmentLocationsKHR = reinterpret_cast(procAddr(device, "vkCmdSetRenderingAttachmentLocationsKHR")); +#endif +#if (defined(VK_KHR_dynamic_rendering_local_read)) + fp_vkCmdSetRenderingInputAttachmentIndicesKHR = reinterpret_cast(procAddr(device, "vkCmdSetRenderingInputAttachmentIndicesKHR")); +#endif +#if (defined(VK_EXT_shader_object)) || (defined(VK_EXT_depth_clamp_control)) + fp_vkCmdSetDepthClampRangeEXT = reinterpret_cast(procAddr(device, "vkCmdSetDepthClampRangeEXT")); +#endif +#if (defined(VK_EXT_host_query_reset)) + fp_vkResetQueryPoolEXT = reinterpret_cast(procAddr(device, "vkResetQueryPoolEXT")); +#endif +#if (defined(VK_KHR_maintenance1)) + fp_vkTrimCommandPoolKHR = reinterpret_cast(procAddr(device, "vkTrimCommandPoolKHR")); +#endif +#if (defined(VK_KHR_device_group)) + fp_vkGetDeviceGroupPeerMemoryFeaturesKHR = reinterpret_cast(procAddr(device, "vkGetDeviceGroupPeerMemoryFeaturesKHR")); +#endif +#if (defined(VK_KHR_bind_memory2)) + fp_vkBindBufferMemory2KHR = reinterpret_cast(procAddr(device, "vkBindBufferMemory2KHR")); +#endif +#if (defined(VK_KHR_bind_memory2)) + fp_vkBindImageMemory2KHR = reinterpret_cast(procAddr(device, "vkBindImageMemory2KHR")); +#endif +#if (defined(VK_KHR_device_group)) + fp_vkCmdSetDeviceMaskKHR = reinterpret_cast(procAddr(device, "vkCmdSetDeviceMaskKHR")); +#endif +#if (defined(VK_KHR_device_group)) + fp_vkCmdDispatchBaseKHR = reinterpret_cast(procAddr(device, "vkCmdDispatchBaseKHR")); +#endif +#if (defined(VK_KHR_descriptor_update_template)) + fp_vkCreateDescriptorUpdateTemplateKHR = reinterpret_cast(procAddr(device, "vkCreateDescriptorUpdateTemplateKHR")); +#endif +#if 
(defined(VK_KHR_descriptor_update_template)) + fp_vkDestroyDescriptorUpdateTemplateKHR = reinterpret_cast(procAddr(device, "vkDestroyDescriptorUpdateTemplateKHR")); +#endif +#if (defined(VK_KHR_descriptor_update_template)) + fp_vkUpdateDescriptorSetWithTemplateKHR = reinterpret_cast(procAddr(device, "vkUpdateDescriptorSetWithTemplateKHR")); +#endif +#if (defined(VK_KHR_get_memory_requirements2)) + fp_vkGetBufferMemoryRequirements2KHR = reinterpret_cast(procAddr(device, "vkGetBufferMemoryRequirements2KHR")); +#endif +#if (defined(VK_KHR_get_memory_requirements2)) + fp_vkGetImageMemoryRequirements2KHR = reinterpret_cast(procAddr(device, "vkGetImageMemoryRequirements2KHR")); +#endif +#if (defined(VK_KHR_get_memory_requirements2)) + fp_vkGetImageSparseMemoryRequirements2KHR = reinterpret_cast(procAddr(device, "vkGetImageSparseMemoryRequirements2KHR")); +#endif +#if (defined(VK_KHR_maintenance4)) + fp_vkGetDeviceBufferMemoryRequirementsKHR = reinterpret_cast(procAddr(device, "vkGetDeviceBufferMemoryRequirementsKHR")); +#endif +#if (defined(VK_KHR_maintenance4)) + fp_vkGetDeviceImageMemoryRequirementsKHR = reinterpret_cast(procAddr(device, "vkGetDeviceImageMemoryRequirementsKHR")); +#endif +#if (defined(VK_KHR_maintenance4)) + fp_vkGetDeviceImageSparseMemoryRequirementsKHR = reinterpret_cast(procAddr(device, "vkGetDeviceImageSparseMemoryRequirementsKHR")); +#endif +#if (defined(VK_KHR_sampler_ycbcr_conversion)) + fp_vkCreateSamplerYcbcrConversionKHR = reinterpret_cast(procAddr(device, "vkCreateSamplerYcbcrConversionKHR")); +#endif +#if (defined(VK_KHR_sampler_ycbcr_conversion)) + fp_vkDestroySamplerYcbcrConversionKHR = reinterpret_cast(procAddr(device, "vkDestroySamplerYcbcrConversionKHR")); +#endif +#if (defined(VK_KHR_maintenance3)) + fp_vkGetDescriptorSetLayoutSupportKHR = reinterpret_cast(procAddr(device, "vkGetDescriptorSetLayoutSupportKHR")); +#endif +#if (defined(VK_EXT_calibrated_timestamps)) + fp_vkGetCalibratedTimestampsEXT = reinterpret_cast(procAddr(device, "vkGetCalibratedTimestampsEXT")); +#endif +#if (defined(VK_KHR_create_renderpass2)) + fp_vkCreateRenderPass2KHR = reinterpret_cast(procAddr(device, "vkCreateRenderPass2KHR")); +#endif +#if (defined(VK_KHR_create_renderpass2)) + fp_vkCmdBeginRenderPass2KHR = reinterpret_cast(procAddr(device, "vkCmdBeginRenderPass2KHR")); +#endif +#if (defined(VK_KHR_create_renderpass2)) + fp_vkCmdNextSubpass2KHR = reinterpret_cast(procAddr(device, "vkCmdNextSubpass2KHR")); +#endif +#if (defined(VK_KHR_create_renderpass2)) + fp_vkCmdEndRenderPass2KHR = reinterpret_cast(procAddr(device, "vkCmdEndRenderPass2KHR")); +#endif +#if (defined(VK_KHR_timeline_semaphore)) + fp_vkGetSemaphoreCounterValueKHR = reinterpret_cast(procAddr(device, "vkGetSemaphoreCounterValueKHR")); +#endif +#if (defined(VK_KHR_timeline_semaphore)) + fp_vkWaitSemaphoresKHR = reinterpret_cast(procAddr(device, "vkWaitSemaphoresKHR")); +#endif +#if (defined(VK_KHR_timeline_semaphore)) + fp_vkSignalSemaphoreKHR = reinterpret_cast(procAddr(device, "vkSignalSemaphoreKHR")); +#endif +#if (defined(VK_AMD_draw_indirect_count)) + fp_vkCmdDrawIndirectCountAMD = reinterpret_cast(procAddr(device, "vkCmdDrawIndirectCountAMD")); +#endif +#if (defined(VK_AMD_draw_indirect_count)) + fp_vkCmdDrawIndexedIndirectCountAMD = reinterpret_cast(procAddr(device, "vkCmdDrawIndexedIndirectCountAMD")); +#endif +#if (defined(VK_NV_ray_tracing)) + fp_vkGetRayTracingShaderGroupHandlesNV = reinterpret_cast(procAddr(device, "vkGetRayTracingShaderGroupHandlesNV")); +#endif +#if (defined(VK_KHR_buffer_device_address)) 
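+    // Note: each entry below follows the same pattern as those above. The pointer is resolved
+    // through the device-level procAddr loader and is only compiled in when the guarding extension
+    // macro (here VK_KHR_buffer_device_address) is provided by the Vulkan headers; at runtime it
+    // can still be null if the extension was not enabled on the device.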
+ fp_vkGetBufferOpaqueCaptureAddressKHR = reinterpret_cast(procAddr(device, "vkGetBufferOpaqueCaptureAddressKHR")); +#endif +#if (defined(VK_EXT_buffer_device_address)) + fp_vkGetBufferDeviceAddressEXT = reinterpret_cast(procAddr(device, "vkGetBufferDeviceAddressEXT")); +#endif +#if (defined(VK_KHR_buffer_device_address)) + fp_vkGetDeviceMemoryOpaqueCaptureAddressKHR = reinterpret_cast(procAddr(device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR")); +#endif +#if (defined(VK_EXT_line_rasterization)) + fp_vkCmdSetLineStippleEXT = reinterpret_cast(procAddr(device, "vkCmdSetLineStippleEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetCullModeEXT = reinterpret_cast(procAddr(device, "vkCmdSetCullModeEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetFrontFaceEXT = reinterpret_cast(procAddr(device, "vkCmdSetFrontFaceEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetPrimitiveTopologyEXT = reinterpret_cast(procAddr(device, "vkCmdSetPrimitiveTopologyEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetViewportWithCountEXT = reinterpret_cast(procAddr(device, "vkCmdSetViewportWithCountEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetScissorWithCountEXT = reinterpret_cast(procAddr(device, "vkCmdSetScissorWithCountEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + fp_vkCmdBindVertexBuffers2EXT = reinterpret_cast(procAddr(device, "vkCmdBindVertexBuffers2EXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetDepthTestEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetDepthTestEnableEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetDepthWriteEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetDepthWriteEnableEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetDepthCompareOpEXT = reinterpret_cast(procAddr(device, "vkCmdSetDepthCompareOpEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetDepthBoundsTestEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetDepthBoundsTestEnableEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetStencilTestEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetStencilTestEnableEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetStencilOpEXT = reinterpret_cast(procAddr(device, "vkCmdSetStencilOpEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetRasterizerDiscardEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetRasterizerDiscardEnableEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetDepthBiasEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetDepthBiasEnableEXT")); +#endif +#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) + fp_vkCmdSetPrimitiveRestartEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetPrimitiveRestartEnableEXT")); +#endif +#if (defined(VK_EXT_private_data)) + fp_vkCreatePrivateDataSlotEXT = 
reinterpret_cast(procAddr(device, "vkCreatePrivateDataSlotEXT")); +#endif +#if (defined(VK_EXT_private_data)) + fp_vkDestroyPrivateDataSlotEXT = reinterpret_cast(procAddr(device, "vkDestroyPrivateDataSlotEXT")); +#endif +#if (defined(VK_EXT_private_data)) + fp_vkSetPrivateDataEXT = reinterpret_cast(procAddr(device, "vkSetPrivateDataEXT")); +#endif +#if (defined(VK_EXT_private_data)) + fp_vkGetPrivateDataEXT = reinterpret_cast(procAddr(device, "vkGetPrivateDataEXT")); +#endif +#if (defined(VK_KHR_copy_commands2)) + fp_vkCmdCopyBuffer2KHR = reinterpret_cast(procAddr(device, "vkCmdCopyBuffer2KHR")); +#endif +#if (defined(VK_KHR_copy_commands2)) + fp_vkCmdCopyImage2KHR = reinterpret_cast(procAddr(device, "vkCmdCopyImage2KHR")); +#endif +#if (defined(VK_KHR_copy_commands2)) + fp_vkCmdBlitImage2KHR = reinterpret_cast(procAddr(device, "vkCmdBlitImage2KHR")); +#endif +#if (defined(VK_KHR_copy_commands2)) + fp_vkCmdCopyBufferToImage2KHR = reinterpret_cast(procAddr(device, "vkCmdCopyBufferToImage2KHR")); +#endif +#if (defined(VK_KHR_copy_commands2)) + fp_vkCmdCopyImageToBuffer2KHR = reinterpret_cast(procAddr(device, "vkCmdCopyImageToBuffer2KHR")); +#endif +#if (defined(VK_KHR_copy_commands2)) + fp_vkCmdResolveImage2KHR = reinterpret_cast(procAddr(device, "vkCmdResolveImage2KHR")); +#endif +#if (defined(VK_KHR_synchronization2)) + fp_vkCmdSetEvent2KHR = reinterpret_cast(procAddr(device, "vkCmdSetEvent2KHR")); +#endif +#if (defined(VK_KHR_synchronization2)) + fp_vkCmdResetEvent2KHR = reinterpret_cast(procAddr(device, "vkCmdResetEvent2KHR")); +#endif +#if (defined(VK_KHR_synchronization2)) + fp_vkCmdWaitEvents2KHR = reinterpret_cast(procAddr(device, "vkCmdWaitEvents2KHR")); +#endif +#if (defined(VK_KHR_synchronization2)) + fp_vkCmdPipelineBarrier2KHR = reinterpret_cast(procAddr(device, "vkCmdPipelineBarrier2KHR")); +#endif +#if (defined(VK_KHR_synchronization2)) + fp_vkQueueSubmit2KHR = reinterpret_cast(procAddr(device, "vkQueueSubmit2KHR")); +#endif +#if (defined(VK_KHR_synchronization2)) + fp_vkCmdWriteTimestamp2KHR = reinterpret_cast(procAddr(device, "vkCmdWriteTimestamp2KHR")); +#endif +#if (defined(VK_KHR_dynamic_rendering)) + fp_vkCmdBeginRenderingKHR = reinterpret_cast(procAddr(device, "vkCmdBeginRenderingKHR")); +#endif +#if (defined(VK_KHR_dynamic_rendering)) + fp_vkCmdEndRenderingKHR = reinterpret_cast(procAddr(device, "vkCmdEndRenderingKHR")); +#endif +#if (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) + fp_vkGetImageSubresourceLayout2EXT = reinterpret_cast(procAddr(device, "vkGetImageSubresourceLayout2EXT")); +#endif + } + void getDeviceQueue(uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue) const noexcept { + fp_vkGetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue); + } + VkResult queueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence) const noexcept { + return fp_vkQueueSubmit(queue, submitCount, pSubmits, fence); + } + VkResult queueWaitIdle(VkQueue queue) const noexcept { + return fp_vkQueueWaitIdle(queue); + } + VkResult deviceWaitIdle() const noexcept { + return fp_vkDeviceWaitIdle(device); + } + VkResult allocateMemory(const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory) const noexcept { + return fp_vkAllocateMemory(device, pAllocateInfo, pAllocator, pMemory); + } + void freeMemory(VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkFreeMemory(device, memory, pAllocator); + } + VkResult 
mapMemory(VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData) const noexcept { + return fp_vkMapMemory(device, memory, offset, size, flags, ppData); + } + void unmapMemory(VkDeviceMemory memory) const noexcept { + fp_vkUnmapMemory(device, memory); + } + VkResult flushMappedMemoryRanges(uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges) const noexcept { + return fp_vkFlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges); + } + VkResult invalidateMappedMemoryRanges(uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges) const noexcept { + return fp_vkInvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges); + } + void getDeviceMemoryCommitment(VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) const noexcept { + fp_vkGetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes); + } + void getBufferMemoryRequirements(VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements) const noexcept { + fp_vkGetBufferMemoryRequirements(device, buffer, pMemoryRequirements); + } + VkResult bindBufferMemory(VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset) const noexcept { + return fp_vkBindBufferMemory(device, buffer, memory, memoryOffset); + } + void getImageMemoryRequirements(VkImage image, VkMemoryRequirements* pMemoryRequirements) const noexcept { + fp_vkGetImageMemoryRequirements(device, image, pMemoryRequirements); + } + VkResult bindImageMemory(VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) const noexcept { + return fp_vkBindImageMemory(device, image, memory, memoryOffset); + } + void getImageSparseMemoryRequirements(VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) const noexcept { + fp_vkGetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements); + } + VkResult queueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) const noexcept { + return fp_vkQueueBindSparse(queue, bindInfoCount, pBindInfo, fence); + } + VkResult createFence(const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) const noexcept { + return fp_vkCreateFence(device, pCreateInfo, pAllocator, pFence); + } + void destroyFence(VkFence fence, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyFence(device, fence, pAllocator); + } + VkResult resetFences(uint32_t fenceCount, const VkFence* pFences) const noexcept { + return fp_vkResetFences(device, fenceCount, pFences); + } + VkResult getFenceStatus(VkFence fence) const noexcept { + return fp_vkGetFenceStatus(device, fence); + } + VkResult waitForFences(uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) const noexcept { + return fp_vkWaitForFences(device, fenceCount, pFences, waitAll, timeout); + } + VkResult createSemaphore(const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore) const noexcept { + return fp_vkCreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore); + } + void destroySemaphore(VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroySemaphore(device, semaphore, pAllocator); + } + VkResult createEvent(const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent) const noexcept { + return fp_vkCreateEvent(device, pCreateInfo, pAllocator, 
pEvent); + } + void destroyEvent(VkEvent event, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyEvent(device, event, pAllocator); + } + VkResult getEventStatus(VkEvent event) const noexcept { + return fp_vkGetEventStatus(device, event); + } + VkResult setEvent(VkEvent event) const noexcept { + return fp_vkSetEvent(device, event); + } + VkResult resetEvent(VkEvent event) const noexcept { + return fp_vkResetEvent(device, event); + } + VkResult createQueryPool(const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool) const noexcept { + return fp_vkCreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool); + } + void destroyQueryPool(VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyQueryPool(device, queryPool, pAllocator); + } + VkResult getQueryPoolResults(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) const noexcept { + return fp_vkGetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags); + } +#if (defined(VK_VERSION_1_2)) + void resetQueryPool(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) const noexcept { + fp_vkResetQueryPool(device, queryPool, firstQuery, queryCount); + } +#endif + VkResult createBuffer(const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer) const noexcept { + return fp_vkCreateBuffer(device, pCreateInfo, pAllocator, pBuffer); + } + void destroyBuffer(VkBuffer buffer, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyBuffer(device, buffer, pAllocator); + } + VkResult createBufferView(const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView) const noexcept { + return fp_vkCreateBufferView(device, pCreateInfo, pAllocator, pView); + } + void destroyBufferView(VkBufferView bufferView, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyBufferView(device, bufferView, pAllocator); + } + VkResult createImage(const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage) const noexcept { + return fp_vkCreateImage(device, pCreateInfo, pAllocator, pImage); + } + void destroyImage(VkImage image, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyImage(device, image, pAllocator); + } + void getImageSubresourceLayout(VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) const noexcept { + fp_vkGetImageSubresourceLayout(device, image, pSubresource, pLayout); + } + VkResult createImageView(const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView) const noexcept { + return fp_vkCreateImageView(device, pCreateInfo, pAllocator, pView); + } + void destroyImageView(VkImageView imageView, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyImageView(device, imageView, pAllocator); + } + VkResult createShaderModule(const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule) const noexcept { + return fp_vkCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule); + } + void destroyShaderModule(VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyShaderModule(device, shaderModule, pAllocator); + } + VkResult createPipelineCache(const 
VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache) const noexcept { + return fp_vkCreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache); + } + void destroyPipelineCache(VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyPipelineCache(device, pipelineCache, pAllocator); + } + VkResult getPipelineCacheData(VkPipelineCache pipelineCache, size_t* pDataSize, void* pData) const noexcept { + return fp_vkGetPipelineCacheData(device, pipelineCache, pDataSize, pData); + } + VkResult mergePipelineCaches(VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches) const noexcept { + return fp_vkMergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches); + } +#if (defined(VK_KHR_pipeline_binary)) + VkResult createPipelineBinariesKHR(const VkPipelineBinaryCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineBinaryHandlesInfoKHR* pBinaries) const noexcept { + return fp_vkCreatePipelineBinariesKHR(device, pCreateInfo, pAllocator, pBinaries); + } +#endif +#if (defined(VK_KHR_pipeline_binary)) + void destroyPipelineBinaryKHR(VkPipelineBinaryKHR pipelineBinary, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyPipelineBinaryKHR(device, pipelineBinary, pAllocator); + } +#endif +#if (defined(VK_KHR_pipeline_binary)) + VkResult getPipelineKeyKHR(const VkPipelineCreateInfoKHR* pPipelineCreateInfo, VkPipelineBinaryKeyKHR* pPipelineKey) const noexcept { + return fp_vkGetPipelineKeyKHR(device, pPipelineCreateInfo, pPipelineKey); + } +#endif +#if (defined(VK_KHR_pipeline_binary)) + VkResult getPipelineBinaryDataKHR(const VkPipelineBinaryDataInfoKHR* pInfo, VkPipelineBinaryKeyKHR* pPipelineBinaryKey, size_t* pPipelineBinaryDataSize, void* pPipelineBinaryData) const noexcept { + return fp_vkGetPipelineBinaryDataKHR(device, pInfo, pPipelineBinaryKey, pPipelineBinaryDataSize, pPipelineBinaryData); + } +#endif +#if (defined(VK_KHR_pipeline_binary)) + VkResult releaseCapturedPipelineDataKHR(const VkReleaseCapturedPipelineDataInfoKHR* pInfo, const VkAllocationCallbacks* pAllocator) const noexcept { + return fp_vkReleaseCapturedPipelineDataKHR(device, pInfo, pAllocator); + } +#endif + VkResult createGraphicsPipelines(VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) const noexcept { + return fp_vkCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); + } + VkResult createComputePipelines(VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) const noexcept { + return fp_vkCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); + } +#if (defined(VK_HUAWEI_subpass_shading)) + VkResult getDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(VkRenderPass renderpass, VkExtent2D* pMaxWorkgroupSize) const noexcept { + return fp_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(device, renderpass, pMaxWorkgroupSize); + } +#endif + void destroyPipeline(VkPipeline pipeline, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyPipeline(device, pipeline, pAllocator); + } + VkResult createPipelineLayout(const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, 
VkPipelineLayout* pPipelineLayout) const noexcept { + return fp_vkCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout); + } + void destroyPipelineLayout(VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyPipelineLayout(device, pipelineLayout, pAllocator); + } + VkResult createSampler(const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler) const noexcept { + return fp_vkCreateSampler(device, pCreateInfo, pAllocator, pSampler); + } + void destroySampler(VkSampler sampler, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroySampler(device, sampler, pAllocator); + } + VkResult createDescriptorSetLayout(const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout) const noexcept { + return fp_vkCreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout); + } + void destroyDescriptorSetLayout(VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator); + } + VkResult createDescriptorPool(const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool) const noexcept { + return fp_vkCreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool); + } + void destroyDescriptorPool(VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyDescriptorPool(device, descriptorPool, pAllocator); + } + VkResult resetDescriptorPool(VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) const noexcept { + return fp_vkResetDescriptorPool(device, descriptorPool, flags); + } + VkResult allocateDescriptorSets(const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets) const noexcept { + return fp_vkAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets); + } + VkResult freeDescriptorSets(VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets) const noexcept { + return fp_vkFreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets); + } + void updateDescriptorSets(uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies) const noexcept { + fp_vkUpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies); + } + VkResult createFramebuffer(const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer) const noexcept { + return fp_vkCreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer); + } + void destroyFramebuffer(VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyFramebuffer(device, framebuffer, pAllocator); + } + VkResult createRenderPass(const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) const noexcept { + return fp_vkCreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass); + } + void destroyRenderPass(VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyRenderPass(device, renderPass, pAllocator); + } + void getRenderAreaGranularity(VkRenderPass renderPass, VkExtent2D* pGranularity) const noexcept { + 
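+        // Wrappers such as this one forward to the function pointer loaded in the constructor and
+        // supply the stored VkDevice handle, so callers never pass the device themselves.
+        // Illustrative use only (names are hypothetical): a test app holding this table as `disp`
+        // could call `disp.getRenderAreaGranularity(render_pass, &granularity);`.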
fp_vkGetRenderAreaGranularity(device, renderPass, pGranularity); + } +#if (defined(VK_KHR_maintenance5)) + void getRenderingAreaGranularityKHR(const VkRenderingAreaInfoKHR* pRenderingAreaInfo, VkExtent2D* pGranularity) const noexcept { + fp_vkGetRenderingAreaGranularityKHR(device, pRenderingAreaInfo, pGranularity); + } +#endif + VkResult createCommandPool(const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool) const noexcept { + return fp_vkCreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool); + } + void destroyCommandPool(VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyCommandPool(device, commandPool, pAllocator); + } + VkResult resetCommandPool(VkCommandPool commandPool, VkCommandPoolResetFlags flags) const noexcept { + return fp_vkResetCommandPool(device, commandPool, flags); + } + VkResult allocateCommandBuffers(const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers) const noexcept { + return fp_vkAllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers); + } + void freeCommandBuffers(VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers) const noexcept { + fp_vkFreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers); + } + VkResult beginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo) const noexcept { + return fp_vkBeginCommandBuffer(commandBuffer, pBeginInfo); + } + VkResult endCommandBuffer(VkCommandBuffer commandBuffer) const noexcept { + return fp_vkEndCommandBuffer(commandBuffer); + } + VkResult resetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) const noexcept { + return fp_vkResetCommandBuffer(commandBuffer, flags); + } + void cmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) const noexcept { + fp_vkCmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline); + } +#if (defined(VK_EXT_attachment_feedback_loop_dynamic_state)) + void cmdSetAttachmentFeedbackLoopEnableEXT(VkCommandBuffer commandBuffer, VkImageAspectFlags aspectMask) const noexcept { + fp_vkCmdSetAttachmentFeedbackLoopEnableEXT(commandBuffer, aspectMask); + } +#endif + void cmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports) const noexcept { + fp_vkCmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports); + } + void cmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors) const noexcept { + fp_vkCmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors); + } + void cmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) const noexcept { + fp_vkCmdSetLineWidth(commandBuffer, lineWidth); + } + void cmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor) const noexcept { + fp_vkCmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor); + } + void cmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) const noexcept { + fp_vkCmdSetBlendConstants(commandBuffer, blendConstants); + } + void cmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) const noexcept { + fp_vkCmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds); + } + void 
cmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask) const noexcept { + fp_vkCmdSetStencilCompareMask(commandBuffer, faceMask, compareMask); + } + void cmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask) const noexcept { + fp_vkCmdSetStencilWriteMask(commandBuffer, faceMask, writeMask); + } + void cmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference) const noexcept { + fp_vkCmdSetStencilReference(commandBuffer, faceMask, reference); + } + void cmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) const noexcept { + fp_vkCmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets); + } + void cmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) const noexcept { + fp_vkCmdBindIndexBuffer(commandBuffer, buffer, offset, indexType); + } + void cmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) const noexcept { + fp_vkCmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets); + } + void cmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) const noexcept { + fp_vkCmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance); + } + void cmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) const noexcept { + fp_vkCmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance); + } +#if (defined(VK_EXT_multi_draw)) + void cmdDrawMultiEXT(VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawInfoEXT* pVertexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride) const noexcept { + fp_vkCmdDrawMultiEXT(commandBuffer, drawCount, pVertexInfo, instanceCount, firstInstance, stride); + } +#endif +#if (defined(VK_EXT_multi_draw)) + void cmdDrawMultiIndexedEXT(VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawIndexedInfoEXT* pIndexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, const int32_t* pVertexOffset) const noexcept { + fp_vkCmdDrawMultiIndexedEXT(commandBuffer, drawCount, pIndexInfo, instanceCount, firstInstance, stride, pVertexOffset); + } +#endif + void cmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) const noexcept { + fp_vkCmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride); + } + void cmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) const noexcept { + fp_vkCmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride); + } + void cmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) const noexcept { + fp_vkCmdDispatch(commandBuffer, groupCountX, groupCountY, groupCountZ); + } + void cmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer 
buffer, VkDeviceSize offset) const noexcept { + fp_vkCmdDispatchIndirect(commandBuffer, buffer, offset); + } +#if (defined(VK_HUAWEI_subpass_shading)) + void cmdSubpassShadingHUAWEI(VkCommandBuffer commandBuffer) const noexcept { + fp_vkCmdSubpassShadingHUAWEI(commandBuffer); + } +#endif +#if (defined(VK_HUAWEI_cluster_culling_shader)) + void cmdDrawClusterHUAWEI(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) const noexcept { + fp_vkCmdDrawClusterHUAWEI(commandBuffer, groupCountX, groupCountY, groupCountZ); + } +#endif +#if (defined(VK_HUAWEI_cluster_culling_shader)) + void cmdDrawClusterIndirectHUAWEI(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const noexcept { + fp_vkCmdDrawClusterIndirectHUAWEI(commandBuffer, buffer, offset); + } +#endif +#if (defined(VK_NV_device_generated_commands_compute)) + void cmdUpdatePipelineIndirectBufferNV(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) const noexcept { + fp_vkCmdUpdatePipelineIndirectBufferNV(commandBuffer, pipelineBindPoint, pipeline); + } +#endif + void cmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) const noexcept { + fp_vkCmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions); + } + void cmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) const noexcept { + fp_vkCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions); + } + void cmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) const noexcept { + fp_vkCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter); + } + void cmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) const noexcept { + fp_vkCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions); + } + void cmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) const noexcept { + fp_vkCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions); + } +#if (defined(VK_NV_copy_memory_indirect)) + void cmdCopyMemoryIndirectNV(VkCommandBuffer commandBuffer, VkDeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride) const noexcept { + fp_vkCmdCopyMemoryIndirectNV(commandBuffer, copyBufferAddress, copyCount, stride); + } +#endif +#if (defined(VK_NV_copy_memory_indirect)) + void cmdCopyMemoryToImageIndirectNV(VkCommandBuffer commandBuffer, VkDeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride, VkImage dstImage, VkImageLayout dstImageLayout, const VkImageSubresourceLayers* pImageSubresources) const noexcept { + fp_vkCmdCopyMemoryToImageIndirectNV(commandBuffer, copyBufferAddress, copyCount, stride, dstImage, dstImageLayout, pImageSubresources); + } +#endif + void cmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* 
pData) const noexcept { + fp_vkCmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData); + } + void cmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data) const noexcept { + fp_vkCmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data); + } + void cmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) const noexcept { + fp_vkCmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges); + } + void cmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) const noexcept { + fp_vkCmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges); + } + void cmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) const noexcept { + fp_vkCmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects); + } + void cmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) const noexcept { + fp_vkCmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions); + } + void cmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const noexcept { + fp_vkCmdSetEvent(commandBuffer, event, stageMask); + } + void cmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const noexcept { + fp_vkCmdResetEvent(commandBuffer, event, stageMask); + } + void cmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) const noexcept { + fp_vkCmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers); + } + void cmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) const noexcept { + fp_vkCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers); + } + void cmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags) const noexcept { + fp_vkCmdBeginQuery(commandBuffer, queryPool, query, flags); + } + void cmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query) const noexcept { + 
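+        // Command-buffer-scoped wrappers like this one take the VkCommandBuffer explicitly and
+        // forward without prepending the stored VkDevice handle. Illustrative use only (handle
+        // names are hypothetical): `disp.cmdEndQuery(cmd, query_pool, 0);`.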
fp_vkCmdEndQuery(commandBuffer, queryPool, query); + } +#if (defined(VK_EXT_conditional_rendering)) + void cmdBeginConditionalRenderingEXT(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin) const noexcept { + fp_vkCmdBeginConditionalRenderingEXT(commandBuffer, pConditionalRenderingBegin); + } +#endif +#if (defined(VK_EXT_conditional_rendering)) + void cmdEndConditionalRenderingEXT(VkCommandBuffer commandBuffer) const noexcept { + fp_vkCmdEndConditionalRenderingEXT(commandBuffer); + } +#endif + void cmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) const noexcept { + fp_vkCmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount); + } + void cmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query) const noexcept { + fp_vkCmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query); + } + void cmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags) const noexcept { + fp_vkCmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags); + } + void cmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues) const noexcept { + fp_vkCmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues); + } + void cmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) const noexcept { + fp_vkCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents); + } + void cmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) const noexcept { + fp_vkCmdNextSubpass(commandBuffer, contents); + } + void cmdEndRenderPass(VkCommandBuffer commandBuffer) const noexcept { + fp_vkCmdEndRenderPass(commandBuffer); + } + void cmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers) const noexcept { + fp_vkCmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers); + } +#if (defined(VK_KHR_display_swapchain)) + VkResult createSharedSwapchainsKHR(uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains) const noexcept { + return fp_vkCreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator, pSwapchains); + } +#endif +#if (defined(VK_KHR_swapchain)) + VkResult createSwapchainKHR(const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain) const noexcept { + return fp_vkCreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain); + } +#endif +#if (defined(VK_KHR_swapchain)) + void destroySwapchainKHR(VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroySwapchainKHR(device, swapchain, pAllocator); + } +#endif +#if (defined(VK_KHR_swapchain)) + VkResult getSwapchainImagesKHR(VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages) const noexcept { + return fp_vkGetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages); + } +#endif +#if (defined(VK_KHR_swapchain)) + VkResult acquireNextImageKHR(VkSwapchainKHR swapchain, 
uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex) const noexcept { + return fp_vkAcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex); + } +#endif +#if (defined(VK_KHR_swapchain)) + VkResult queuePresentKHR(VkQueue queue, const VkPresentInfoKHR* pPresentInfo) const noexcept { + return fp_vkQueuePresentKHR(queue, pPresentInfo); + } +#endif +#if (defined(VK_EXT_debug_marker)) + VkResult debugMarkerSetObjectNameEXT(const VkDebugMarkerObjectNameInfoEXT* pNameInfo) const noexcept { + return fp_vkDebugMarkerSetObjectNameEXT(device, pNameInfo); + } +#endif +#if (defined(VK_EXT_debug_marker)) + VkResult debugMarkerSetObjectTagEXT(const VkDebugMarkerObjectTagInfoEXT* pTagInfo) const noexcept { + return fp_vkDebugMarkerSetObjectTagEXT(device, pTagInfo); + } +#endif +#if (defined(VK_EXT_debug_marker)) + void cmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) const noexcept { + fp_vkCmdDebugMarkerBeginEXT(commandBuffer, pMarkerInfo); + } +#endif +#if (defined(VK_EXT_debug_marker)) + void cmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer) const noexcept { + fp_vkCmdDebugMarkerEndEXT(commandBuffer); + } +#endif +#if (defined(VK_EXT_debug_marker)) + void cmdDebugMarkerInsertEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) const noexcept { + fp_vkCmdDebugMarkerInsertEXT(commandBuffer, pMarkerInfo); + } +#endif +#if (defined(VK_NV_external_memory_win32)) + VkResult getMemoryWin32HandleNV(VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle) const noexcept { + return fp_vkGetMemoryWin32HandleNV(device, memory, handleType, pHandle); + } +#endif +#if (defined(VK_NV_device_generated_commands)) + void cmdExecuteGeneratedCommandsNV(VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo) const noexcept { + fp_vkCmdExecuteGeneratedCommandsNV(commandBuffer, isPreprocessed, pGeneratedCommandsInfo); + } +#endif +#if (defined(VK_NV_device_generated_commands)) + void cmdPreprocessGeneratedCommandsNV(VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo) const noexcept { + fp_vkCmdPreprocessGeneratedCommandsNV(commandBuffer, pGeneratedCommandsInfo); + } +#endif +#if (defined(VK_NV_device_generated_commands)) + void cmdBindPipelineShaderGroupNV(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex) const noexcept { + fp_vkCmdBindPipelineShaderGroupNV(commandBuffer, pipelineBindPoint, pipeline, groupIndex); + } +#endif +#if (defined(VK_NV_device_generated_commands)) + void getGeneratedCommandsMemoryRequirementsNV(const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + fp_vkGetGeneratedCommandsMemoryRequirementsNV(device, pInfo, pMemoryRequirements); + } +#endif +#if (defined(VK_NV_device_generated_commands)) + VkResult createIndirectCommandsLayoutNV(const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNV* pIndirectCommandsLayout) const noexcept { + return fp_vkCreateIndirectCommandsLayoutNV(device, pCreateInfo, pAllocator, pIndirectCommandsLayout); + } +#endif +#if (defined(VK_NV_device_generated_commands)) + void destroyIndirectCommandsLayoutNV(VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks* pAllocator) const noexcept { + 
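+        // This wrapper exists only when VK_NV_device_generated_commands is defined at compile time
+        // (see the surrounding #if), and the underlying pointer comes from the procAddr loader, so
+        // it is only safe to call when that extension was enabled on the device.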
fp_vkDestroyIndirectCommandsLayoutNV(device, indirectCommandsLayout, pAllocator); + } +#endif +#if (defined(VK_EXT_device_generated_commands)) + void cmdExecuteGeneratedCommandsEXT(VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo) const noexcept { + fp_vkCmdExecuteGeneratedCommandsEXT(commandBuffer, isPreprocessed, pGeneratedCommandsInfo); + } +#endif +#if (defined(VK_EXT_device_generated_commands)) + void cmdPreprocessGeneratedCommandsEXT(VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo, VkCommandBuffer stateCommandBuffer) const noexcept { + fp_vkCmdPreprocessGeneratedCommandsEXT(commandBuffer, pGeneratedCommandsInfo, stateCommandBuffer); + } +#endif +#if (defined(VK_EXT_device_generated_commands)) + void getGeneratedCommandsMemoryRequirementsEXT(const VkGeneratedCommandsMemoryRequirementsInfoEXT* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + fp_vkGetGeneratedCommandsMemoryRequirementsEXT(device, pInfo, pMemoryRequirements); + } +#endif +#if (defined(VK_EXT_device_generated_commands)) + VkResult createIndirectCommandsLayoutEXT(const VkIndirectCommandsLayoutCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutEXT* pIndirectCommandsLayout) const noexcept { + return fp_vkCreateIndirectCommandsLayoutEXT(device, pCreateInfo, pAllocator, pIndirectCommandsLayout); + } +#endif +#if (defined(VK_EXT_device_generated_commands)) + void destroyIndirectCommandsLayoutEXT(VkIndirectCommandsLayoutEXT indirectCommandsLayout, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyIndirectCommandsLayoutEXT(device, indirectCommandsLayout, pAllocator); + } +#endif +#if (defined(VK_EXT_device_generated_commands)) + VkResult createIndirectExecutionSetEXT(const VkIndirectExecutionSetCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectExecutionSetEXT* pIndirectExecutionSet) const noexcept { + return fp_vkCreateIndirectExecutionSetEXT(device, pCreateInfo, pAllocator, pIndirectExecutionSet); + } +#endif +#if (defined(VK_EXT_device_generated_commands)) + void destroyIndirectExecutionSetEXT(VkIndirectExecutionSetEXT indirectExecutionSet, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyIndirectExecutionSetEXT(device, indirectExecutionSet, pAllocator); + } +#endif +#if (defined(VK_EXT_device_generated_commands)) + void updateIndirectExecutionSetPipelineEXT(VkIndirectExecutionSetEXT indirectExecutionSet, uint32_t executionSetWriteCount, const VkWriteIndirectExecutionSetPipelineEXT* pExecutionSetWrites) const noexcept { + fp_vkUpdateIndirectExecutionSetPipelineEXT(device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites); + } +#endif +#if (defined(VK_EXT_device_generated_commands)) + void updateIndirectExecutionSetShaderEXT(VkIndirectExecutionSetEXT indirectExecutionSet, uint32_t executionSetWriteCount, const VkWriteIndirectExecutionSetShaderEXT* pExecutionSetWrites) const noexcept { + fp_vkUpdateIndirectExecutionSetShaderEXT(device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites); + } +#endif +#if (defined(VK_KHR_push_descriptor)) + void cmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites) const noexcept { + fp_vkCmdPushDescriptorSetKHR(commandBuffer, pipelineBindPoint, layout, set, 
descriptorWriteCount, pDescriptorWrites); + } +#endif +#if (defined(VK_VERSION_1_1)) + void trimCommandPool(VkCommandPool commandPool, VkCommandPoolTrimFlagsKHR flags) const noexcept { + fp_vkTrimCommandPool(device, commandPool, flags); + } +#endif +#if (defined(VK_KHR_external_memory_win32)) + VkResult getMemoryWin32HandleKHR(const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) const noexcept { + return fp_vkGetMemoryWin32HandleKHR(device, pGetWin32HandleInfo, pHandle); + } +#endif +#if (defined(VK_KHR_external_memory_win32)) + VkResult getMemoryWin32HandlePropertiesKHR(VkExternalMemoryHandleTypeFlagBitsKHR handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties) const noexcept { + return fp_vkGetMemoryWin32HandlePropertiesKHR(device, handleType, handle, pMemoryWin32HandleProperties); + } +#endif +#if (defined(VK_KHR_external_memory_fd)) + VkResult getMemoryFdKHR(const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd) const noexcept { + return fp_vkGetMemoryFdKHR(device, pGetFdInfo, pFd); + } +#endif +#if (defined(VK_KHR_external_memory_fd)) + VkResult getMemoryFdPropertiesKHR(VkExternalMemoryHandleTypeFlagBitsKHR handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties) const noexcept { + return fp_vkGetMemoryFdPropertiesKHR(device, handleType, fd, pMemoryFdProperties); + } +#endif +#if (defined(VK_FUCHSIA_external_memory)) + VkResult getMemoryZirconHandleFUCHSIA(const VkMemoryGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, zx_handle_t* pZirconHandle) const noexcept { + return fp_vkGetMemoryZirconHandleFUCHSIA(device, pGetZirconHandleInfo, pZirconHandle); + } +#endif +#if (defined(VK_FUCHSIA_external_memory)) + VkResult getMemoryZirconHandlePropertiesFUCHSIA(VkExternalMemoryHandleTypeFlagBitsKHR handleType, zx_handle_t zirconHandle, VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties) const noexcept { + return fp_vkGetMemoryZirconHandlePropertiesFUCHSIA(device, handleType, zirconHandle, pMemoryZirconHandleProperties); + } +#endif +#if (defined(VK_NV_external_memory_rdma)) + VkResult getMemoryRemoteAddressNV(const VkMemoryGetRemoteAddressInfoNV* pMemoryGetRemoteAddressInfo, VkRemoteAddressNV* pAddress) const noexcept { + return fp_vkGetMemoryRemoteAddressNV(device, pMemoryGetRemoteAddressInfo, pAddress); + } +#endif +#if (defined(VK_NV_external_memory_sci_buf)) + VkResult getMemorySciBufNV(const VkMemoryGetSciBufInfoNV* pGetSciBufInfo, NvSciBufObj* pHandle) const noexcept { + return fp_vkGetMemorySciBufNV(device, pGetSciBufInfo, pHandle); + } +#endif +#if (defined(VK_KHR_external_semaphore_win32)) + VkResult getSemaphoreWin32HandleKHR(const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) const noexcept { + return fp_vkGetSemaphoreWin32HandleKHR(device, pGetWin32HandleInfo, pHandle); + } +#endif +#if (defined(VK_KHR_external_semaphore_win32)) + VkResult importSemaphoreWin32HandleKHR(const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo) const noexcept { + return fp_vkImportSemaphoreWin32HandleKHR(device, pImportSemaphoreWin32HandleInfo); + } +#endif +#if (defined(VK_KHR_external_semaphore_fd)) + VkResult getSemaphoreFdKHR(const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd) const noexcept { + return fp_vkGetSemaphoreFdKHR(device, pGetFdInfo, pFd); + } +#endif +#if (defined(VK_KHR_external_semaphore_fd)) + VkResult importSemaphoreFdKHR(const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) const noexcept { + return fp_vkImportSemaphoreFdKHR(device, 
pImportSemaphoreFdInfo); + } +#endif +#if (defined(VK_FUCHSIA_external_semaphore)) + VkResult getSemaphoreZirconHandleFUCHSIA(const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, zx_handle_t* pZirconHandle) const noexcept { + return fp_vkGetSemaphoreZirconHandleFUCHSIA(device, pGetZirconHandleInfo, pZirconHandle); + } +#endif +#if (defined(VK_FUCHSIA_external_semaphore)) + VkResult importSemaphoreZirconHandleFUCHSIA(const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo) const noexcept { + return fp_vkImportSemaphoreZirconHandleFUCHSIA(device, pImportSemaphoreZirconHandleInfo); + } +#endif +#if (defined(VK_KHR_external_fence_win32)) + VkResult getFenceWin32HandleKHR(const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) const noexcept { + return fp_vkGetFenceWin32HandleKHR(device, pGetWin32HandleInfo, pHandle); + } +#endif +#if (defined(VK_KHR_external_fence_win32)) + VkResult importFenceWin32HandleKHR(const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo) const noexcept { + return fp_vkImportFenceWin32HandleKHR(device, pImportFenceWin32HandleInfo); + } +#endif +#if (defined(VK_KHR_external_fence_fd)) + VkResult getFenceFdKHR(const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd) const noexcept { + return fp_vkGetFenceFdKHR(device, pGetFdInfo, pFd); + } +#endif +#if (defined(VK_KHR_external_fence_fd)) + VkResult importFenceFdKHR(const VkImportFenceFdInfoKHR* pImportFenceFdInfo) const noexcept { + return fp_vkImportFenceFdKHR(device, pImportFenceFdInfo); + } +#endif +#if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) + VkResult getFenceSciSyncFenceNV(const VkFenceGetSciSyncInfoNV* pGetSciSyncHandleInfo, void* pHandle) const noexcept { + return fp_vkGetFenceSciSyncFenceNV(device, pGetSciSyncHandleInfo, pHandle); + } +#endif +#if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) + VkResult getFenceSciSyncObjNV(const VkFenceGetSciSyncInfoNV* pGetSciSyncHandleInfo, void* pHandle) const noexcept { + return fp_vkGetFenceSciSyncObjNV(device, pGetSciSyncHandleInfo, pHandle); + } +#endif +#if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) + VkResult importFenceSciSyncFenceNV(const VkImportFenceSciSyncInfoNV* pImportFenceSciSyncInfo) const noexcept { + return fp_vkImportFenceSciSyncFenceNV(device, pImportFenceSciSyncInfo); + } +#endif +#if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) + VkResult importFenceSciSyncObjNV(const VkImportFenceSciSyncInfoNV* pImportFenceSciSyncInfo) const noexcept { + return fp_vkImportFenceSciSyncObjNV(device, pImportFenceSciSyncInfo); + } +#endif +#if (defined(VK_NV_external_sci_sync)) + VkResult getSemaphoreSciSyncObjNV(const VkSemaphoreGetSciSyncInfoNV* pGetSciSyncInfo, void* pHandle) const noexcept { + return fp_vkGetSemaphoreSciSyncObjNV(device, pGetSciSyncInfo, pHandle); + } +#endif +#if (defined(VK_NV_external_sci_sync)) + VkResult importSemaphoreSciSyncObjNV(const VkImportSemaphoreSciSyncInfoNV* pImportSemaphoreSciSyncInfo) const noexcept { + return fp_vkImportSemaphoreSciSyncObjNV(device, pImportSemaphoreSciSyncInfo); + } +#endif +#if (defined(VK_NV_external_sci_sync2)) + VkResult createSemaphoreSciSyncPoolNV(const VkSemaphoreSciSyncPoolCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphoreSciSyncPoolNV* pSemaphorePool) const noexcept { + return fp_vkCreateSemaphoreSciSyncPoolNV(device, pCreateInfo, pAllocator, pSemaphorePool); + } +#endif +#if 
(defined(VK_NV_external_sci_sync2)) + void destroySemaphoreSciSyncPoolNV(VkSemaphoreSciSyncPoolNV semaphorePool, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroySemaphoreSciSyncPoolNV(device, semaphorePool, pAllocator); + } +#endif +#if (defined(VK_EXT_display_control)) + VkResult displayPowerControlEXT(VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo) const noexcept { + return fp_vkDisplayPowerControlEXT(device, display, pDisplayPowerInfo); + } +#endif +#if (defined(VK_EXT_display_control)) + VkResult registerDeviceEventEXT(const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) const noexcept { + return fp_vkRegisterDeviceEventEXT(device, pDeviceEventInfo, pAllocator, pFence); + } +#endif +#if (defined(VK_EXT_display_control)) + VkResult registerDisplayEventEXT(VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) const noexcept { + return fp_vkRegisterDisplayEventEXT(device, display, pDisplayEventInfo, pAllocator, pFence); + } +#endif +#if (defined(VK_EXT_display_control)) + VkResult getSwapchainCounterEXT(VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue) const noexcept { + return fp_vkGetSwapchainCounterEXT(device, swapchain, counter, pCounterValue); + } +#endif +#if (defined(VK_VERSION_1_1)) + void getDeviceGroupPeerMemoryFeatures(uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlagsKHR* pPeerMemoryFeatures) const noexcept { + fp_vkGetDeviceGroupPeerMemoryFeatures(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures); + } +#endif +#if (defined(VK_VERSION_1_1)) + VkResult bindBufferMemory2(uint32_t bindInfoCount, const VkBindBufferMemoryInfoKHR* pBindInfos) const noexcept { + return fp_vkBindBufferMemory2(device, bindInfoCount, pBindInfos); + } +#endif +#if (defined(VK_VERSION_1_1)) + VkResult bindImageMemory2(uint32_t bindInfoCount, const VkBindImageMemoryInfoKHR* pBindInfos) const noexcept { + return fp_vkBindImageMemory2(device, bindInfoCount, pBindInfos); + } +#endif +#if (defined(VK_VERSION_1_1)) + void cmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask) const noexcept { + fp_vkCmdSetDeviceMask(commandBuffer, deviceMask); + } +#endif +#if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group)) + VkResult getDeviceGroupPresentCapabilitiesKHR(VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities) const noexcept { + return fp_vkGetDeviceGroupPresentCapabilitiesKHR(device, pDeviceGroupPresentCapabilities); + } +#endif +#if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group)) + VkResult getDeviceGroupSurfacePresentModesKHR(VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes) const noexcept { + return fp_vkGetDeviceGroupSurfacePresentModesKHR(device, surface, pModes); + } +#endif +#if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group)) + VkResult acquireNextImage2KHR(const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex) const noexcept { + return fp_vkAcquireNextImage2KHR(device, pAcquireInfo, pImageIndex); + } +#endif +#if (defined(VK_VERSION_1_1)) + void cmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) const noexcept { + fp_vkCmdDispatchBase(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, 
groupCountY, groupCountZ); + } +#endif +#if (defined(VK_VERSION_1_1)) + VkResult createDescriptorUpdateTemplate(const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate) const noexcept { + return fp_vkCreateDescriptorUpdateTemplate(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate); + } +#endif +#if (defined(VK_VERSION_1_1)) + void destroyDescriptorUpdateTemplate(VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator); + } +#endif +#if (defined(VK_VERSION_1_1)) + void updateDescriptorSetWithTemplate(VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void* pData) const noexcept { + fp_vkUpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate, pData); + } +#endif +#if (defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_descriptor_update_template)) + void cmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData) const noexcept { + fp_vkCmdPushDescriptorSetWithTemplateKHR(commandBuffer, descriptorUpdateTemplate, layout, set, pData); + } +#endif +#if (defined(VK_EXT_hdr_metadata)) + void setHdrMetadataEXT(uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata) const noexcept { + fp_vkSetHdrMetadataEXT(device, swapchainCount, pSwapchains, pMetadata); + } +#endif +#if (defined(VK_KHR_shared_presentable_image)) + VkResult getSwapchainStatusKHR(VkSwapchainKHR swapchain) const noexcept { + return fp_vkGetSwapchainStatusKHR(device, swapchain); + } +#endif +#if (defined(VK_GOOGLE_display_timing)) + VkResult getRefreshCycleDurationGOOGLE(VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties) const noexcept { + return fp_vkGetRefreshCycleDurationGOOGLE(device, swapchain, pDisplayTimingProperties); + } +#endif +#if (defined(VK_GOOGLE_display_timing)) + VkResult getPastPresentationTimingGOOGLE(VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings) const noexcept { + return fp_vkGetPastPresentationTimingGOOGLE(device, swapchain, pPresentationTimingCount, pPresentationTimings); + } +#endif +#if (defined(VK_NV_clip_space_w_scaling)) + void cmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings) const noexcept { + fp_vkCmdSetViewportWScalingNV(commandBuffer, firstViewport, viewportCount, pViewportWScalings); + } +#endif +#if (defined(VK_EXT_discard_rectangles)) + void cmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles) const noexcept { + fp_vkCmdSetDiscardRectangleEXT(commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles); + } +#endif +#if ((defined(VK_EXT_discard_rectangles))) && VK_HEADER_VERSION >= 241 + void cmdSetDiscardRectangleEnableEXT(VkCommandBuffer commandBuffer, VkBool32 discardRectangleEnable) const noexcept { + fp_vkCmdSetDiscardRectangleEnableEXT(commandBuffer, discardRectangleEnable); + } +#endif +#if ((defined(VK_EXT_discard_rectangles))) && VK_HEADER_VERSION >= 241 + void 
cmdSetDiscardRectangleModeEXT(VkCommandBuffer commandBuffer, VkDiscardRectangleModeEXT discardRectangleMode) const noexcept { + fp_vkCmdSetDiscardRectangleModeEXT(commandBuffer, discardRectangleMode); + } +#endif +#if (defined(VK_EXT_sample_locations)) + void cmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo) const noexcept { + fp_vkCmdSetSampleLocationsEXT(commandBuffer, pSampleLocationsInfo); + } +#endif +#if (defined(VK_VERSION_1_1)) + void getBufferMemoryRequirements2(const VkBufferMemoryRequirementsInfo2KHR* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + fp_vkGetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements); + } +#endif +#if (defined(VK_VERSION_1_1)) + void getImageMemoryRequirements2(const VkImageMemoryRequirementsInfo2KHR* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + fp_vkGetImageMemoryRequirements2(device, pInfo, pMemoryRequirements); + } +#endif +#if (defined(VK_VERSION_1_1)) + void getImageSparseMemoryRequirements2(const VkImageSparseMemoryRequirementsInfo2KHR* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements) const noexcept { + fp_vkGetImageSparseMemoryRequirements2(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements); + } +#endif +#if (defined(VK_VERSION_1_3)) + void getDeviceBufferMemoryRequirements(const VkDeviceBufferMemoryRequirementsKHR* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + fp_vkGetDeviceBufferMemoryRequirements(device, pInfo, pMemoryRequirements); + } +#endif +#if (defined(VK_VERSION_1_3)) + void getDeviceImageMemoryRequirements(const VkDeviceImageMemoryRequirementsKHR* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + fp_vkGetDeviceImageMemoryRequirements(device, pInfo, pMemoryRequirements); + } +#endif +#if (defined(VK_VERSION_1_3)) + void getDeviceImageSparseMemoryRequirements(const VkDeviceImageMemoryRequirementsKHR* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements) const noexcept { + fp_vkGetDeviceImageSparseMemoryRequirements(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements); + } +#endif +#if (defined(VK_VERSION_1_1)) + VkResult createSamplerYcbcrConversion(const VkSamplerYcbcrConversionCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversionKHR* pYcbcrConversion) const noexcept { + return fp_vkCreateSamplerYcbcrConversion(device, pCreateInfo, pAllocator, pYcbcrConversion); + } +#endif +#if (defined(VK_VERSION_1_1)) + void destroySamplerYcbcrConversion(VkSamplerYcbcrConversionKHR ycbcrConversion, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator); + } +#endif +#if (defined(VK_VERSION_1_1)) + void getDeviceQueue2(const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue) const noexcept { + fp_vkGetDeviceQueue2(device, pQueueInfo, pQueue); + } +#endif +#if (defined(VK_EXT_validation_cache)) + VkResult createValidationCacheEXT(const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache) const noexcept { + return fp_vkCreateValidationCacheEXT(device, pCreateInfo, pAllocator, pValidationCache); + } +#endif +#if (defined(VK_EXT_validation_cache)) + void destroyValidationCacheEXT(VkValidationCacheEXT validationCache, const 
VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyValidationCacheEXT(device, validationCache, pAllocator); + } +#endif +#if (defined(VK_EXT_validation_cache)) + VkResult getValidationCacheDataEXT(VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData) const noexcept { + return fp_vkGetValidationCacheDataEXT(device, validationCache, pDataSize, pData); + } +#endif +#if (defined(VK_EXT_validation_cache)) + VkResult mergeValidationCachesEXT(VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches) const noexcept { + return fp_vkMergeValidationCachesEXT(device, dstCache, srcCacheCount, pSrcCaches); + } +#endif +#if (defined(VK_VERSION_1_1)) + void getDescriptorSetLayoutSupport(const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupportKHR* pSupport) const noexcept { + fp_vkGetDescriptorSetLayoutSupport(device, pCreateInfo, pSupport); + } +#endif +#if (defined(VK_ANDROID_native_buffer)) + VkResult getSwapchainGrallocUsageANDROID(VkFormat format, VkImageUsageFlags imageUsage, int* grallocUsage) const noexcept { + return fp_vkGetSwapchainGrallocUsageANDROID(device, format, imageUsage, grallocUsage); + } +#endif +#if (defined(VK_ANDROID_native_buffer)) + VkResult getSwapchainGrallocUsage2ANDROID(VkFormat format, VkImageUsageFlags imageUsage, VkSwapchainImageUsageFlagsANDROID swapchainImageUsage, uint64_t* grallocConsumerUsage, uint64_t* grallocProducerUsage) const noexcept { + return fp_vkGetSwapchainGrallocUsage2ANDROID(device, format, imageUsage, swapchainImageUsage, grallocConsumerUsage, grallocProducerUsage); + } +#endif +#if (defined(VK_ANDROID_native_buffer)) + VkResult acquireImageANDROID(VkImage image, int nativeFenceFd, VkSemaphore semaphore, VkFence fence) const noexcept { + return fp_vkAcquireImageANDROID(device, image, nativeFenceFd, semaphore, fence); + } +#endif +#if (defined(VK_ANDROID_native_buffer)) + VkResult queueSignalReleaseImageANDROID(VkQueue queue, uint32_t waitSemaphoreCount, const VkSemaphore* pWaitSemaphores, VkImage image, int* pNativeFenceFd) const noexcept { + return fp_vkQueueSignalReleaseImageANDROID(queue, waitSemaphoreCount, pWaitSemaphores, image, pNativeFenceFd); + } +#endif +#if (defined(VK_AMD_shader_info)) + VkResult getShaderInfoAMD(VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo) const noexcept { + return fp_vkGetShaderInfoAMD(device, pipeline, shaderStage, infoType, pInfoSize, pInfo); + } +#endif +#if (defined(VK_AMD_display_native_hdr)) + void setLocalDimmingAMD(VkSwapchainKHR swapChain, VkBool32 localDimmingEnable) const noexcept { + fp_vkSetLocalDimmingAMD(device, swapChain, localDimmingEnable); + } +#endif +#if (defined(VK_KHR_calibrated_timestamps)) + VkResult getCalibratedTimestampsKHR(uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation) const noexcept { + return fp_vkGetCalibratedTimestampsKHR(device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation); + } +#endif +#if (defined(VK_EXT_debug_utils)) + VkResult setDebugUtilsObjectNameEXT(const VkDebugUtilsObjectNameInfoEXT* pNameInfo) const noexcept { + return fp_vkSetDebugUtilsObjectNameEXT(device, pNameInfo); + } +#endif +#if (defined(VK_EXT_debug_utils)) + VkResult setDebugUtilsObjectTagEXT(const VkDebugUtilsObjectTagInfoEXT* pTagInfo) const noexcept { + return fp_vkSetDebugUtilsObjectTagEXT(device, pTagInfo); + } +#endif +#if (defined(VK_EXT_debug_utils)) + void 
queueBeginDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo) const noexcept { + fp_vkQueueBeginDebugUtilsLabelEXT(queue, pLabelInfo); + } +#endif +#if (defined(VK_EXT_debug_utils)) + void queueEndDebugUtilsLabelEXT(VkQueue queue) const noexcept { + fp_vkQueueEndDebugUtilsLabelEXT(queue); + } +#endif +#if (defined(VK_EXT_debug_utils)) + void queueInsertDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo) const noexcept { + fp_vkQueueInsertDebugUtilsLabelEXT(queue, pLabelInfo); + } +#endif +#if (defined(VK_EXT_debug_utils)) + void cmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo) const noexcept { + fp_vkCmdBeginDebugUtilsLabelEXT(commandBuffer, pLabelInfo); + } +#endif +#if (defined(VK_EXT_debug_utils)) + void cmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) const noexcept { + fp_vkCmdEndDebugUtilsLabelEXT(commandBuffer); + } +#endif +#if (defined(VK_EXT_debug_utils)) + void cmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo) const noexcept { + fp_vkCmdInsertDebugUtilsLabelEXT(commandBuffer, pLabelInfo); + } +#endif +#if (defined(VK_EXT_external_memory_host)) + VkResult getMemoryHostPointerPropertiesEXT(VkExternalMemoryHandleTypeFlagBitsKHR handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties) const noexcept { + return fp_vkGetMemoryHostPointerPropertiesEXT(device, handleType, pHostPointer, pMemoryHostPointerProperties); + } +#endif +#if (defined(VK_AMD_buffer_marker)) + void cmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) const noexcept { + fp_vkCmdWriteBufferMarkerAMD(commandBuffer, pipelineStage, dstBuffer, dstOffset, marker); + } +#endif +#if (defined(VK_VERSION_1_2)) + VkResult createRenderPass2(const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) const noexcept { + return fp_vkCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass); + } +#endif +#if (defined(VK_VERSION_1_2)) + void cmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfoKHR* pSubpassBeginInfo) const noexcept { + fp_vkCmdBeginRenderPass2(commandBuffer, pRenderPassBegin, pSubpassBeginInfo); + } +#endif +#if (defined(VK_VERSION_1_2)) + void cmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR* pSubpassBeginInfo, const VkSubpassEndInfoKHR* pSubpassEndInfo) const noexcept { + fp_vkCmdNextSubpass2(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo); + } +#endif +#if (defined(VK_VERSION_1_2)) + void cmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo) const noexcept { + fp_vkCmdEndRenderPass2(commandBuffer, pSubpassEndInfo); + } +#endif +#if (defined(VK_VERSION_1_2)) + VkResult getSemaphoreCounterValue(VkSemaphore semaphore, uint64_t* pValue) const noexcept { + return fp_vkGetSemaphoreCounterValue(device, semaphore, pValue); + } +#endif +#if (defined(VK_VERSION_1_2)) + VkResult waitSemaphores(const VkSemaphoreWaitInfoKHR* pWaitInfo, uint64_t timeout) const noexcept { + return fp_vkWaitSemaphores(device, pWaitInfo, timeout); + } +#endif +#if (defined(VK_VERSION_1_2)) + VkResult signalSemaphore(const VkSemaphoreSignalInfoKHR* pSignalInfo) const noexcept { + return fp_vkSignalSemaphore(device, pSignalInfo); + } +#endif +#if 
(defined(VK_ANDROID_external_memory_android_hardware_buffer)) + VkResult getAndroidHardwareBufferPropertiesANDROID(const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties) const noexcept { + return fp_vkGetAndroidHardwareBufferPropertiesANDROID(device, buffer, pProperties); + } +#endif +#if (defined(VK_ANDROID_external_memory_android_hardware_buffer)) + VkResult getMemoryAndroidHardwareBufferANDROID(const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer) const noexcept { + return fp_vkGetMemoryAndroidHardwareBufferANDROID(device, pInfo, pBuffer); + } +#endif +#if (defined(VK_VERSION_1_2)) + void cmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) const noexcept { + fp_vkCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); + } +#endif +#if (defined(VK_VERSION_1_2)) + void cmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) const noexcept { + fp_vkCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); + } +#endif +#if (defined(VK_NV_device_diagnostic_checkpoints)) + void cmdSetCheckpointNV(VkCommandBuffer commandBuffer, const void* pCheckpointMarker) const noexcept { + fp_vkCmdSetCheckpointNV(commandBuffer, pCheckpointMarker); + } +#endif +#if (defined(VK_NV_device_diagnostic_checkpoints)) + void getQueueCheckpointDataNV(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData) const noexcept { + fp_vkGetQueueCheckpointDataNV(queue, pCheckpointDataCount, pCheckpointData); + } +#endif +#if (defined(VK_EXT_transform_feedback)) + void cmdBindTransformFeedbackBuffersEXT(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes) const noexcept { + fp_vkCmdBindTransformFeedbackBuffersEXT(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes); + } +#endif +#if (defined(VK_EXT_transform_feedback)) + void cmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) const noexcept { + fp_vkCmdBeginTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets); + } +#endif +#if (defined(VK_EXT_transform_feedback)) + void cmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) const noexcept { + fp_vkCmdEndTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets); + } +#endif +#if (defined(VK_EXT_transform_feedback)) + void cmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index) const noexcept { + fp_vkCmdBeginQueryIndexedEXT(commandBuffer, queryPool, query, flags, index); + } +#endif +#if (defined(VK_EXT_transform_feedback)) + void cmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index) const noexcept { + 
fp_vkCmdEndQueryIndexedEXT(commandBuffer, queryPool, query, index); + } +#endif +#if (defined(VK_EXT_transform_feedback)) + void cmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride) const noexcept { + fp_vkCmdDrawIndirectByteCountEXT(commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride); + } +#endif +#if (defined(VK_NV_scissor_exclusive)) + void cmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors) const noexcept { + fp_vkCmdSetExclusiveScissorNV(commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors); + } +#endif +#if ((defined(VK_NV_scissor_exclusive))) && VK_HEADER_VERSION >= 241 + void cmdSetExclusiveScissorEnableNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkBool32* pExclusiveScissorEnables) const noexcept { + fp_vkCmdSetExclusiveScissorEnableNV(commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissorEnables); + } +#endif +#if (defined(VK_NV_shading_rate_image)) + void cmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout) const noexcept { + fp_vkCmdBindShadingRateImageNV(commandBuffer, imageView, imageLayout); + } +#endif +#if (defined(VK_NV_shading_rate_image)) + void cmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes) const noexcept { + fp_vkCmdSetViewportShadingRatePaletteNV(commandBuffer, firstViewport, viewportCount, pShadingRatePalettes); + } +#endif +#if (defined(VK_NV_shading_rate_image)) + void cmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders) const noexcept { + fp_vkCmdSetCoarseSampleOrderNV(commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders); + } +#endif +#if (defined(VK_NV_mesh_shader)) + void cmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask) const noexcept { + fp_vkCmdDrawMeshTasksNV(commandBuffer, taskCount, firstTask); + } +#endif +#if (defined(VK_NV_mesh_shader)) + void cmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) const noexcept { + fp_vkCmdDrawMeshTasksIndirectNV(commandBuffer, buffer, offset, drawCount, stride); + } +#endif +#if (defined(VK_NV_mesh_shader)) + void cmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) const noexcept { + fp_vkCmdDrawMeshTasksIndirectCountNV(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); + } +#endif +#if (defined(VK_EXT_mesh_shader)) + void cmdDrawMeshTasksEXT(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) const noexcept { + fp_vkCmdDrawMeshTasksEXT(commandBuffer, groupCountX, groupCountY, groupCountZ); + } +#endif +#if (defined(VK_EXT_mesh_shader)) + void cmdDrawMeshTasksIndirectEXT(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize 
offset, uint32_t drawCount, uint32_t stride) const noexcept { + fp_vkCmdDrawMeshTasksIndirectEXT(commandBuffer, buffer, offset, drawCount, stride); + } +#endif +#if (defined(VK_EXT_mesh_shader)) + void cmdDrawMeshTasksIndirectCountEXT(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) const noexcept { + fp_vkCmdDrawMeshTasksIndirectCountEXT(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); + } +#endif +#if (defined(VK_NV_ray_tracing)) + VkResult compileDeferredNV(VkPipeline pipeline, uint32_t shader) const noexcept { + return fp_vkCompileDeferredNV(device, pipeline, shader); + } +#endif +#if (defined(VK_NV_ray_tracing)) + VkResult createAccelerationStructureNV(const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure) const noexcept { + return fp_vkCreateAccelerationStructureNV(device, pCreateInfo, pAllocator, pAccelerationStructure); + } +#endif +#if (defined(VK_HUAWEI_invocation_mask)) + void cmdBindInvocationMaskHUAWEI(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout) const noexcept { + fp_vkCmdBindInvocationMaskHUAWEI(commandBuffer, imageView, imageLayout); + } +#endif +#if (defined(VK_KHR_acceleration_structure)) + void destroyAccelerationStructureKHR(VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyAccelerationStructureKHR(device, accelerationStructure, pAllocator); + } +#endif +#if (defined(VK_NV_ray_tracing)) + void destroyAccelerationStructureNV(VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyAccelerationStructureNV(device, accelerationStructure, pAllocator); + } +#endif +#if (defined(VK_NV_ray_tracing)) + void getAccelerationStructureMemoryRequirementsNV(const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + fp_vkGetAccelerationStructureMemoryRequirementsNV(device, pInfo, pMemoryRequirements); + } +#endif +#if (defined(VK_NV_ray_tracing)) + VkResult bindAccelerationStructureMemoryNV(uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) const noexcept { + return fp_vkBindAccelerationStructureMemoryNV(device, bindInfoCount, pBindInfos); + } +#endif +#if (defined(VK_NV_ray_tracing)) + void cmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode) const noexcept { + fp_vkCmdCopyAccelerationStructureNV(commandBuffer, dst, src, mode); + } +#endif +#if (defined(VK_KHR_acceleration_structure)) + void cmdCopyAccelerationStructureKHR(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo) const noexcept { + fp_vkCmdCopyAccelerationStructureKHR(commandBuffer, pInfo); + } +#endif +#if (defined(VK_KHR_acceleration_structure)) + VkResult copyAccelerationStructureKHR(VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureInfoKHR* pInfo) const noexcept { + return fp_vkCopyAccelerationStructureKHR(device, deferredOperation, pInfo); + } +#endif +#if (defined(VK_KHR_acceleration_structure)) + void cmdCopyAccelerationStructureToMemoryKHR(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo) const noexcept { + 
fp_vkCmdCopyAccelerationStructureToMemoryKHR(commandBuffer, pInfo); + } +#endif +#if (defined(VK_KHR_acceleration_structure)) + VkResult copyAccelerationStructureToMemoryKHR(VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo) const noexcept { + return fp_vkCopyAccelerationStructureToMemoryKHR(device, deferredOperation, pInfo); + } +#endif +#if (defined(VK_KHR_acceleration_structure)) + void cmdCopyMemoryToAccelerationStructureKHR(VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo) const noexcept { + fp_vkCmdCopyMemoryToAccelerationStructureKHR(commandBuffer, pInfo); + } +#endif +#if (defined(VK_KHR_acceleration_structure)) + VkResult copyMemoryToAccelerationStructureKHR(VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo) const noexcept { + return fp_vkCopyMemoryToAccelerationStructureKHR(device, deferredOperation, pInfo); + } +#endif +#if (defined(VK_KHR_acceleration_structure)) + void cmdWriteAccelerationStructuresPropertiesKHR(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) const noexcept { + fp_vkCmdWriteAccelerationStructuresPropertiesKHR(commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery); + } +#endif +#if (defined(VK_NV_ray_tracing)) + void cmdWriteAccelerationStructuresPropertiesNV(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) const noexcept { + fp_vkCmdWriteAccelerationStructuresPropertiesNV(commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery); + } +#endif +#if (defined(VK_NV_ray_tracing)) + void cmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) const noexcept { + fp_vkCmdBuildAccelerationStructureNV(commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset); + } +#endif +#if (defined(VK_KHR_acceleration_structure)) + VkResult writeAccelerationStructuresPropertiesKHR(uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, size_t dataSize, void* pData, size_t stride) const noexcept { + return fp_vkWriteAccelerationStructuresPropertiesKHR(device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride); + } +#endif +#if (defined(VK_KHR_ray_tracing_pipeline)) + void cmdTraceRaysKHR(VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth) const noexcept { + fp_vkCmdTraceRaysKHR(commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth); + } +#endif +#if (defined(VK_NV_ray_tracing)) + void cmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer 
raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth) const noexcept { + fp_vkCmdTraceRaysNV(commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth); + } +#endif +#if (defined(VK_KHR_ray_tracing_pipeline)) + VkResult getRayTracingShaderGroupHandlesKHR(VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData) const noexcept { + return fp_vkGetRayTracingShaderGroupHandlesKHR(device, pipeline, firstGroup, groupCount, dataSize, pData); + } +#endif +#if (defined(VK_KHR_ray_tracing_pipeline)) + VkResult getRayTracingCaptureReplayShaderGroupHandlesKHR(VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData) const noexcept { + return fp_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(device, pipeline, firstGroup, groupCount, dataSize, pData); + } +#endif +#if (defined(VK_NV_ray_tracing)) + VkResult getAccelerationStructureHandleNV(VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData) const noexcept { + return fp_vkGetAccelerationStructureHandleNV(device, accelerationStructure, dataSize, pData); + } +#endif +#if (defined(VK_NV_ray_tracing)) + VkResult createRayTracingPipelinesNV(VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) const noexcept { + return fp_vkCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); + } +#endif +#if (defined(VK_KHR_ray_tracing_pipeline)) + VkResult createRayTracingPipelinesKHR(VkDeferredOperationKHR deferredOperation, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) const noexcept { + return fp_vkCreateRayTracingPipelinesKHR(device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); + } +#endif +#if (defined(VK_KHR_ray_tracing_pipeline)) + void cmdTraceRaysIndirectKHR(VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, VkDeviceAddress indirectDeviceAddress) const noexcept { + fp_vkCmdTraceRaysIndirectKHR(commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, indirectDeviceAddress); + } +#endif +#if (defined(VK_KHR_ray_tracing_maintenance1)) + void cmdTraceRaysIndirect2KHR(VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress) const noexcept { + fp_vkCmdTraceRaysIndirect2KHR(commandBuffer, indirectDeviceAddress); + } +#endif 
+#if (defined(VK_KHR_acceleration_structure)) + void getDeviceAccelerationStructureCompatibilityKHR(const VkAccelerationStructureVersionInfoKHR* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility) const noexcept { + fp_vkGetDeviceAccelerationStructureCompatibilityKHR(device, pVersionInfo, pCompatibility); + } +#endif +#if (defined(VK_KHR_ray_tracing_pipeline)) + VkDeviceSize getRayTracingShaderGroupStackSizeKHR(VkPipeline pipeline, uint32_t group, VkShaderGroupShaderKHR groupShader) const noexcept { + return fp_vkGetRayTracingShaderGroupStackSizeKHR(device, pipeline, group, groupShader); + } +#endif +#if (defined(VK_KHR_ray_tracing_pipeline)) + void cmdSetRayTracingPipelineStackSizeKHR(VkCommandBuffer commandBuffer, uint32_t pipelineStackSize) const noexcept { + fp_vkCmdSetRayTracingPipelineStackSizeKHR(commandBuffer, pipelineStackSize); + } +#endif +#if (defined(VK_EXT_full_screen_exclusive)) + VkResult getDeviceGroupSurfacePresentModes2EXT(const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes) const noexcept { + return fp_vkGetDeviceGroupSurfacePresentModes2EXT(device, pSurfaceInfo, pModes); + } +#endif +#if (defined(VK_EXT_full_screen_exclusive)) + VkResult acquireFullScreenExclusiveModeEXT(VkSwapchainKHR swapchain) const noexcept { + return fp_vkAcquireFullScreenExclusiveModeEXT(device, swapchain); + } +#endif +#if (defined(VK_EXT_full_screen_exclusive)) + VkResult releaseFullScreenExclusiveModeEXT(VkSwapchainKHR swapchain) const noexcept { + return fp_vkReleaseFullScreenExclusiveModeEXT(device, swapchain); + } +#endif +#if (defined(VK_KHR_performance_query)) + VkResult acquireProfilingLockKHR(const VkAcquireProfilingLockInfoKHR* pInfo) const noexcept { + return fp_vkAcquireProfilingLockKHR(device, pInfo); + } +#endif +#if (defined(VK_KHR_performance_query)) + void releaseProfilingLockKHR() const noexcept { + fp_vkReleaseProfilingLockKHR(device); + } +#endif +#if (defined(VK_EXT_image_drm_format_modifier)) + VkResult getImageDrmFormatModifierPropertiesEXT(VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties) const noexcept { + return fp_vkGetImageDrmFormatModifierPropertiesEXT(device, image, pProperties); + } +#endif +#if (defined(VK_VERSION_1_2)) + uint64_t getBufferOpaqueCaptureAddress(const VkBufferDeviceAddressInfoEXT* pInfo) const noexcept { + return fp_vkGetBufferOpaqueCaptureAddress(device, pInfo); + } +#endif +#if (defined(VK_VERSION_1_2)) + VkDeviceAddress getBufferDeviceAddress(const VkBufferDeviceAddressInfoEXT* pInfo) const noexcept { + return fp_vkGetBufferDeviceAddress(device, pInfo); + } +#endif +#if (defined(VK_INTEL_performance_query)) + VkResult initializePerformanceApiINTEL(const VkInitializePerformanceApiInfoINTEL* pInitializeInfo) const noexcept { + return fp_vkInitializePerformanceApiINTEL(device, pInitializeInfo); + } +#endif +#if (defined(VK_INTEL_performance_query)) + void uninitializePerformanceApiINTEL() const noexcept { + fp_vkUninitializePerformanceApiINTEL(device); + } +#endif +#if (defined(VK_INTEL_performance_query)) + VkResult cmdSetPerformanceMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo) const noexcept { + return fp_vkCmdSetPerformanceMarkerINTEL(commandBuffer, pMarkerInfo); + } +#endif +#if (defined(VK_INTEL_performance_query)) + VkResult cmdSetPerformanceStreamMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo) const noexcept { + return 
fp_vkCmdSetPerformanceStreamMarkerINTEL(commandBuffer, pMarkerInfo); + } +#endif +#if (defined(VK_INTEL_performance_query)) + VkResult cmdSetPerformanceOverrideINTEL(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo) const noexcept { + return fp_vkCmdSetPerformanceOverrideINTEL(commandBuffer, pOverrideInfo); + } +#endif +#if (defined(VK_INTEL_performance_query)) + VkResult acquirePerformanceConfigurationINTEL(const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration) const noexcept { + return fp_vkAcquirePerformanceConfigurationINTEL(device, pAcquireInfo, pConfiguration); + } +#endif +#if (defined(VK_INTEL_performance_query)) + VkResult releasePerformanceConfigurationINTEL(VkPerformanceConfigurationINTEL configuration) const noexcept { + return fp_vkReleasePerformanceConfigurationINTEL(device, configuration); + } +#endif +#if (defined(VK_INTEL_performance_query)) + VkResult queueSetPerformanceConfigurationINTEL(VkQueue queue, VkPerformanceConfigurationINTEL configuration) const noexcept { + return fp_vkQueueSetPerformanceConfigurationINTEL(queue, configuration); + } +#endif +#if (defined(VK_INTEL_performance_query)) + VkResult getPerformanceParameterINTEL(VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue) const noexcept { + return fp_vkGetPerformanceParameterINTEL(device, parameter, pValue); + } +#endif +#if (defined(VK_VERSION_1_2)) + uint64_t getDeviceMemoryOpaqueCaptureAddress(const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo) const noexcept { + return fp_vkGetDeviceMemoryOpaqueCaptureAddress(device, pInfo); + } +#endif +#if (defined(VK_KHR_pipeline_executable_properties)) + VkResult getPipelineExecutablePropertiesKHR(const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties) const noexcept { + return fp_vkGetPipelineExecutablePropertiesKHR(device, pPipelineInfo, pExecutableCount, pProperties); + } +#endif +#if (defined(VK_KHR_pipeline_executable_properties)) + VkResult getPipelineExecutableStatisticsKHR(const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics) const noexcept { + return fp_vkGetPipelineExecutableStatisticsKHR(device, pExecutableInfo, pStatisticCount, pStatistics); + } +#endif +#if (defined(VK_KHR_pipeline_executable_properties)) + VkResult getPipelineExecutableInternalRepresentationsKHR(const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) const noexcept { + return fp_vkGetPipelineExecutableInternalRepresentationsKHR(device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations); + } +#endif +#if (defined(VK_KHR_line_rasterization)) + void cmdSetLineStippleKHR(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern) const noexcept { + fp_vkCmdSetLineStippleKHR(commandBuffer, lineStippleFactor, lineStipplePattern); + } +#endif +#if (defined(VKSC_VERSION_1_0)) + VkResult getFaultData(VkFaultQueryBehavior faultQueryBehavior, VkBool32* pUnrecordedFaults, uint32_t* pFaultCount, VkFaultData* pFaults) const noexcept { + return fp_vkGetFaultData(device, faultQueryBehavior, pUnrecordedFaults, pFaultCount, pFaults); + } +#endif +#if (defined(VK_KHR_acceleration_structure)) + VkResult createAccelerationStructureKHR(const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const 
VkAllocationCallbacks* pAllocator, VkAccelerationStructureKHR* pAccelerationStructure) const noexcept { + return fp_vkCreateAccelerationStructureKHR(device, pCreateInfo, pAllocator, pAccelerationStructure); + } +#endif +#if (defined(VK_KHR_acceleration_structure)) + void cmdBuildAccelerationStructuresKHR(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos) const noexcept { + fp_vkCmdBuildAccelerationStructuresKHR(commandBuffer, infoCount, pInfos, ppBuildRangeInfos); + } +#endif +#if (defined(VK_KHR_acceleration_structure)) + void cmdBuildAccelerationStructuresIndirectKHR(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkDeviceAddress* pIndirectDeviceAddresses, const uint32_t* pIndirectStrides, const uint32_t* const* ppMaxPrimitiveCounts) const noexcept { + fp_vkCmdBuildAccelerationStructuresIndirectKHR(commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts); + } +#endif +#if (defined(VK_KHR_acceleration_structure)) + VkResult buildAccelerationStructuresKHR(VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos) const noexcept { + return fp_vkBuildAccelerationStructuresKHR(device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos); + } +#endif +#if (defined(VK_KHR_acceleration_structure)) + VkDeviceAddress getAccelerationStructureDeviceAddressKHR(const VkAccelerationStructureDeviceAddressInfoKHR* pInfo) const noexcept { + return fp_vkGetAccelerationStructureDeviceAddressKHR(device, pInfo); + } +#endif +#if (defined(VK_KHR_deferred_host_operations)) + VkResult createDeferredOperationKHR(const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation) const noexcept { + return fp_vkCreateDeferredOperationKHR(device, pAllocator, pDeferredOperation); + } +#endif +#if (defined(VK_KHR_deferred_host_operations)) + void destroyDeferredOperationKHR(VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyDeferredOperationKHR(device, operation, pAllocator); + } +#endif +#if (defined(VK_KHR_deferred_host_operations)) + uint32_t getDeferredOperationMaxConcurrencyKHR(VkDeferredOperationKHR operation) const noexcept { + return fp_vkGetDeferredOperationMaxConcurrencyKHR(device, operation); + } +#endif +#if (defined(VK_KHR_deferred_host_operations)) + VkResult getDeferredOperationResultKHR(VkDeferredOperationKHR operation) const noexcept { + return fp_vkGetDeferredOperationResultKHR(device, operation); + } +#endif +#if (defined(VK_KHR_deferred_host_operations)) + VkResult deferredOperationJoinKHR(VkDeferredOperationKHR operation) const noexcept { + return fp_vkDeferredOperationJoinKHR(device, operation); + } +#endif +#if (defined(VK_NV_device_generated_commands_compute)) + void getPipelineIndirectMemoryRequirementsNV(const VkComputePipelineCreateInfo* pCreateInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + fp_vkGetPipelineIndirectMemoryRequirementsNV(device, pCreateInfo, pMemoryRequirements); + } +#endif +#if (defined(VK_NV_device_generated_commands_compute)) + VkDeviceAddress getPipelineIndirectDeviceAddressNV(const VkPipelineIndirectDeviceAddressInfoNV* pInfo) const noexcept { + return fp_vkGetPipelineIndirectDeviceAddressNV(device, 
pInfo); + } +#endif +#if (defined(VK_AMD_anti_lag)) + void antiLagUpdateAMD(const VkAntiLagDataAMD* pData) const noexcept { + fp_vkAntiLagUpdateAMD(device, pData); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdSetCullMode(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode) const noexcept { + fp_vkCmdSetCullMode(commandBuffer, cullMode); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdSetFrontFace(VkCommandBuffer commandBuffer, VkFrontFace frontFace) const noexcept { + fp_vkCmdSetFrontFace(commandBuffer, frontFace); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdSetPrimitiveTopology(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology) const noexcept { + fp_vkCmdSetPrimitiveTopology(commandBuffer, primitiveTopology); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdSetViewportWithCount(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports) const noexcept { + fp_vkCmdSetViewportWithCount(commandBuffer, viewportCount, pViewports); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdSetScissorWithCount(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors) const noexcept { + fp_vkCmdSetScissorWithCount(commandBuffer, scissorCount, pScissors); + } +#endif +#if (defined(VK_KHR_maintenance5)) + void cmdBindIndexBuffer2KHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType) const noexcept { + fp_vkCmdBindIndexBuffer2KHR(commandBuffer, buffer, offset, size, indexType); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdBindVertexBuffers2(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides) const noexcept { + fp_vkCmdBindVertexBuffers2(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdSetDepthTestEnable(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable) const noexcept { + fp_vkCmdSetDepthTestEnable(commandBuffer, depthTestEnable); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdSetDepthWriteEnable(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable) const noexcept { + fp_vkCmdSetDepthWriteEnable(commandBuffer, depthWriteEnable); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdSetDepthCompareOp(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp) const noexcept { + fp_vkCmdSetDepthCompareOp(commandBuffer, depthCompareOp); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdSetDepthBoundsTestEnable(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable) const noexcept { + fp_vkCmdSetDepthBoundsTestEnable(commandBuffer, depthBoundsTestEnable); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdSetStencilTestEnable(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable) const noexcept { + fp_vkCmdSetStencilTestEnable(commandBuffer, stencilTestEnable); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdSetStencilOp(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp) const noexcept { + fp_vkCmdSetStencilOp(commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) + void cmdSetPatchControlPointsEXT(VkCommandBuffer commandBuffer, uint32_t patchControlPoints) const noexcept { + 
fp_vkCmdSetPatchControlPointsEXT(commandBuffer, patchControlPoints); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdSetRasterizerDiscardEnable(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable) const noexcept { + fp_vkCmdSetRasterizerDiscardEnable(commandBuffer, rasterizerDiscardEnable); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdSetDepthBiasEnable(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable) const noexcept { + fp_vkCmdSetDepthBiasEnable(commandBuffer, depthBiasEnable); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) + void cmdSetLogicOpEXT(VkCommandBuffer commandBuffer, VkLogicOp logicOp) const noexcept { + fp_vkCmdSetLogicOpEXT(commandBuffer, logicOp); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdSetPrimitiveRestartEnable(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable) const noexcept { + fp_vkCmdSetPrimitiveRestartEnable(commandBuffer, primitiveRestartEnable); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetTessellationDomainOriginEXT(VkCommandBuffer commandBuffer, VkTessellationDomainOriginKHR domainOrigin) const noexcept { + fp_vkCmdSetTessellationDomainOriginEXT(commandBuffer, domainOrigin); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetDepthClampEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthClampEnable) const noexcept { + fp_vkCmdSetDepthClampEnableEXT(commandBuffer, depthClampEnable); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetPolygonModeEXT(VkCommandBuffer commandBuffer, VkPolygonMode polygonMode) const noexcept { + fp_vkCmdSetPolygonModeEXT(commandBuffer, polygonMode); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetRasterizationSamplesEXT(VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples) const noexcept { + fp_vkCmdSetRasterizationSamplesEXT(commandBuffer, rasterizationSamples); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetSampleMaskEXT(VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask* pSampleMask) const noexcept { + fp_vkCmdSetSampleMaskEXT(commandBuffer, samples, pSampleMask); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetAlphaToCoverageEnableEXT(VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable) const noexcept { + fp_vkCmdSetAlphaToCoverageEnableEXT(commandBuffer, alphaToCoverageEnable); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetAlphaToOneEnableEXT(VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable) const noexcept { + fp_vkCmdSetAlphaToOneEnableEXT(commandBuffer, alphaToOneEnable); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetLogicOpEnableEXT(VkCommandBuffer commandBuffer, VkBool32 logicOpEnable) const noexcept { + fp_vkCmdSetLogicOpEnableEXT(commandBuffer, logicOpEnable); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetColorBlendEnableEXT(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkBool32* pColorBlendEnables) const noexcept { + fp_vkCmdSetColorBlendEnableEXT(commandBuffer, firstAttachment, 
attachmentCount, pColorBlendEnables); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetColorBlendEquationEXT(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendEquationEXT* pColorBlendEquations) const noexcept { + fp_vkCmdSetColorBlendEquationEXT(commandBuffer, firstAttachment, attachmentCount, pColorBlendEquations); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetColorWriteMaskEXT(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorComponentFlags* pColorWriteMasks) const noexcept { + fp_vkCmdSetColorWriteMaskEXT(commandBuffer, firstAttachment, attachmentCount, pColorWriteMasks); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetRasterizationStreamEXT(VkCommandBuffer commandBuffer, uint32_t rasterizationStream) const noexcept { + fp_vkCmdSetRasterizationStreamEXT(commandBuffer, rasterizationStream); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetConservativeRasterizationModeEXT(VkCommandBuffer commandBuffer, VkConservativeRasterizationModeEXT conservativeRasterizationMode) const noexcept { + fp_vkCmdSetConservativeRasterizationModeEXT(commandBuffer, conservativeRasterizationMode); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetExtraPrimitiveOverestimationSizeEXT(VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize) const noexcept { + fp_vkCmdSetExtraPrimitiveOverestimationSizeEXT(commandBuffer, extraPrimitiveOverestimationSize); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetDepthClipEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthClipEnable) const noexcept { + fp_vkCmdSetDepthClipEnableEXT(commandBuffer, depthClipEnable); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetSampleLocationsEnableEXT(VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable) const noexcept { + fp_vkCmdSetSampleLocationsEnableEXT(commandBuffer, sampleLocationsEnable); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetColorBlendAdvancedEXT(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendAdvancedEXT* pColorBlendAdvanced) const noexcept { + fp_vkCmdSetColorBlendAdvancedEXT(commandBuffer, firstAttachment, attachmentCount, pColorBlendAdvanced); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetProvokingVertexModeEXT(VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode) const noexcept { + fp_vkCmdSetProvokingVertexModeEXT(commandBuffer, provokingVertexMode); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetLineRasterizationModeEXT(VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode) const noexcept { + fp_vkCmdSetLineRasterizationModeEXT(commandBuffer, lineRasterizationMode); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetLineStippleEnableEXT(VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable) const noexcept { + fp_vkCmdSetLineStippleEnableEXT(commandBuffer, 
stippledLineEnable); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetDepthClipNegativeOneToOneEXT(VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne) const noexcept { + fp_vkCmdSetDepthClipNegativeOneToOneEXT(commandBuffer, negativeOneToOne); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetViewportWScalingEnableNV(VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable) const noexcept { + fp_vkCmdSetViewportWScalingEnableNV(commandBuffer, viewportWScalingEnable); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetViewportSwizzleNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportSwizzleNV* pViewportSwizzles) const noexcept { + fp_vkCmdSetViewportSwizzleNV(commandBuffer, firstViewport, viewportCount, pViewportSwizzles); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetCoverageToColorEnableNV(VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable) const noexcept { + fp_vkCmdSetCoverageToColorEnableNV(commandBuffer, coverageToColorEnable); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetCoverageToColorLocationNV(VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation) const noexcept { + fp_vkCmdSetCoverageToColorLocationNV(commandBuffer, coverageToColorLocation); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetCoverageModulationModeNV(VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode) const noexcept { + fp_vkCmdSetCoverageModulationModeNV(commandBuffer, coverageModulationMode); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetCoverageModulationTableEnableNV(VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable) const noexcept { + fp_vkCmdSetCoverageModulationTableEnableNV(commandBuffer, coverageModulationTableEnable); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetCoverageModulationTableNV(VkCommandBuffer commandBuffer, uint32_t coverageModulationTableCount, const float* pCoverageModulationTable) const noexcept { + fp_vkCmdSetCoverageModulationTableNV(commandBuffer, coverageModulationTableCount, pCoverageModulationTable); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetShadingRateImageEnableNV(VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable) const noexcept { + fp_vkCmdSetShadingRateImageEnableNV(commandBuffer, shadingRateImageEnable); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetCoverageReductionModeNV(VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode) const noexcept { + fp_vkCmdSetCoverageReductionModeNV(commandBuffer, coverageReductionMode); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + void cmdSetRepresentativeFragmentTestEnableNV(VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable) const noexcept { + fp_vkCmdSetRepresentativeFragmentTestEnableNV(commandBuffer, representativeFragmentTestEnable); + } +#endif +#if (defined(VK_VERSION_1_3)) + VkResult createPrivateDataSlot(const 
VkPrivateDataSlotCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlotEXT* pPrivateDataSlot) const noexcept { + return fp_vkCreatePrivateDataSlot(device, pCreateInfo, pAllocator, pPrivateDataSlot); + } +#endif +#if (defined(VK_VERSION_1_3)) + void destroyPrivateDataSlot(VkPrivateDataSlotEXT privateDataSlot, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyPrivateDataSlot(device, privateDataSlot, pAllocator); + } +#endif +#if (defined(VK_VERSION_1_3)) + VkResult setPrivateData(VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t data) const noexcept { + return fp_vkSetPrivateData(device, objectType, objectHandle, privateDataSlot, data); + } +#endif +#if (defined(VK_VERSION_1_3)) + void getPrivateData(VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t* pData) const noexcept { + fp_vkGetPrivateData(device, objectType, objectHandle, privateDataSlot, pData); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdCopyBuffer2(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR* pCopyBufferInfo) const noexcept { + fp_vkCmdCopyBuffer2(commandBuffer, pCopyBufferInfo); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdCopyImage2(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR* pCopyImageInfo) const noexcept { + fp_vkCmdCopyImage2(commandBuffer, pCopyImageInfo); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdBlitImage2(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR* pBlitImageInfo) const noexcept { + fp_vkCmdBlitImage2(commandBuffer, pBlitImageInfo); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdCopyBufferToImage2(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo) const noexcept { + fp_vkCmdCopyBufferToImage2(commandBuffer, pCopyBufferToImageInfo); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdCopyImageToBuffer2(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo) const noexcept { + fp_vkCmdCopyImageToBuffer2(commandBuffer, pCopyImageToBufferInfo); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdResolveImage2(VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR* pResolveImageInfo) const noexcept { + fp_vkCmdResolveImage2(commandBuffer, pResolveImageInfo); + } +#endif +#if (defined(VK_KHR_object_refresh)) + void cmdRefreshObjectsKHR(VkCommandBuffer commandBuffer, const VkRefreshObjectListKHR* pRefreshObjects) const noexcept { + fp_vkCmdRefreshObjectsKHR(commandBuffer, pRefreshObjects); + } +#endif +#if (defined(VK_KHR_fragment_shading_rate)) + void cmdSetFragmentShadingRateKHR(VkCommandBuffer commandBuffer, const VkExtent2D* pFragmentSize, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]) const noexcept { + fp_vkCmdSetFragmentShadingRateKHR(commandBuffer, pFragmentSize, combinerOps); + } +#endif +#if (defined(VK_NV_fragment_shading_rate_enums)) + void cmdSetFragmentShadingRateEnumNV(VkCommandBuffer commandBuffer, VkFragmentShadingRateNV shadingRate, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]) const noexcept { + fp_vkCmdSetFragmentShadingRateEnumNV(commandBuffer, shadingRate, combinerOps); + } +#endif +#if (defined(VK_KHR_acceleration_structure)) + void getAccelerationStructureBuildSizesKHR(VkAccelerationStructureBuildTypeKHR buildType, const VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo, const uint32_t* pMaxPrimitiveCounts, VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo) const noexcept { + 
fp_vkGetAccelerationStructureBuildSizesKHR(device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo); + } +#endif +#if (defined(VK_EXT_vertex_input_dynamic_state)) || (defined(VK_EXT_shader_object)) + void cmdSetVertexInputEXT(VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount, const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount, const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions) const noexcept { + fp_vkCmdSetVertexInputEXT(commandBuffer, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions); + } +#endif +#if (defined(VK_EXT_color_write_enable)) + void cmdSetColorWriteEnableEXT(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32* pColorWriteEnables) const noexcept { + fp_vkCmdSetColorWriteEnableEXT(commandBuffer, attachmentCount, pColorWriteEnables); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdSetEvent2(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfoKHR* pDependencyInfo) const noexcept { + fp_vkCmdSetEvent2(commandBuffer, event, pDependencyInfo); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdResetEvent2(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2KHR stageMask) const noexcept { + fp_vkCmdResetEvent2(commandBuffer, event, stageMask); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdWaitEvents2(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfoKHR* pDependencyInfos) const noexcept { + fp_vkCmdWaitEvents2(commandBuffer, eventCount, pEvents, pDependencyInfos); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdPipelineBarrier2(VkCommandBuffer commandBuffer, const VkDependencyInfoKHR* pDependencyInfo) const noexcept { + fp_vkCmdPipelineBarrier2(commandBuffer, pDependencyInfo); + } +#endif +#if (defined(VK_VERSION_1_3)) + VkResult queueSubmit2(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR* pSubmits, VkFence fence) const noexcept { + return fp_vkQueueSubmit2(queue, submitCount, pSubmits, fence); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdWriteTimestamp2(VkCommandBuffer commandBuffer, VkPipelineStageFlags2KHR stage, VkQueryPool queryPool, uint32_t query) const noexcept { + fp_vkCmdWriteTimestamp2(commandBuffer, stage, queryPool, query); + } +#endif +#if (defined(VK_KHR_synchronization2)) + void cmdWriteBufferMarker2AMD(VkCommandBuffer commandBuffer, VkPipelineStageFlags2KHR stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) const noexcept { + fp_vkCmdWriteBufferMarker2AMD(commandBuffer, stage, dstBuffer, dstOffset, marker); + } +#endif +#if (defined(VK_KHR_synchronization2)) + void getQueueCheckpointData2NV(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointData2NV* pCheckpointData) const noexcept { + fp_vkGetQueueCheckpointData2NV(queue, pCheckpointDataCount, pCheckpointData); + } +#endif +#if (defined(VK_EXT_host_image_copy)) + VkResult copyMemoryToImageEXT(const VkCopyMemoryToImageInfoEXT* pCopyMemoryToImageInfo) const noexcept { + return fp_vkCopyMemoryToImageEXT(device, pCopyMemoryToImageInfo); + } +#endif +#if (defined(VK_EXT_host_image_copy)) + VkResult copyImageToMemoryEXT(const VkCopyImageToMemoryInfoEXT* pCopyImageToMemoryInfo) const noexcept { + return fp_vkCopyImageToMemoryEXT(device, pCopyImageToMemoryInfo); + } +#endif +#if (defined(VK_EXT_host_image_copy)) + VkResult copyImageToImageEXT(const VkCopyImageToImageInfoEXT* 
pCopyImageToImageInfo) const noexcept { + return fp_vkCopyImageToImageEXT(device, pCopyImageToImageInfo); + } +#endif +#if (defined(VK_EXT_host_image_copy)) + VkResult transitionImageLayoutEXT(uint32_t transitionCount, const VkHostImageLayoutTransitionInfoEXT* pTransitions) const noexcept { + return fp_vkTransitionImageLayoutEXT(device, transitionCount, pTransitions); + } +#endif +#if (defined(VKSC_VERSION_1_0)) + void getCommandPoolMemoryConsumption(VkCommandPool commandPool, VkCommandBuffer commandBuffer, VkCommandPoolMemoryConsumption* pConsumption) const noexcept { + fp_vkGetCommandPoolMemoryConsumption(device, commandPool, commandBuffer, pConsumption); + } +#endif +#if (defined(VK_KHR_video_queue)) + VkResult createVideoSessionKHR(const VkVideoSessionCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionKHR* pVideoSession) const noexcept { + return fp_vkCreateVideoSessionKHR(device, pCreateInfo, pAllocator, pVideoSession); + } +#endif +#if (defined(VK_KHR_video_queue)) + void destroyVideoSessionKHR(VkVideoSessionKHR videoSession, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyVideoSessionKHR(device, videoSession, pAllocator); + } +#endif +#if (defined(VK_KHR_video_queue)) + VkResult createVideoSessionParametersKHR(const VkVideoSessionParametersCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionParametersKHR* pVideoSessionParameters) const noexcept { + return fp_vkCreateVideoSessionParametersKHR(device, pCreateInfo, pAllocator, pVideoSessionParameters); + } +#endif +#if (defined(VK_KHR_video_queue)) + VkResult updateVideoSessionParametersKHR(VkVideoSessionParametersKHR videoSessionParameters, const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo) const noexcept { + return fp_vkUpdateVideoSessionParametersKHR(device, videoSessionParameters, pUpdateInfo); + } +#endif +#if (defined(VK_KHR_video_encode_queue)) + VkResult getEncodedVideoSessionParametersKHR(const VkVideoEncodeSessionParametersGetInfoKHR* pVideoSessionParametersInfo, VkVideoEncodeSessionParametersFeedbackInfoKHR* pFeedbackInfo, size_t* pDataSize, void* pData) const noexcept { + return fp_vkGetEncodedVideoSessionParametersKHR(device, pVideoSessionParametersInfo, pFeedbackInfo, pDataSize, pData); + } +#endif +#if (defined(VK_KHR_video_queue)) + void destroyVideoSessionParametersKHR(VkVideoSessionParametersKHR videoSessionParameters, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyVideoSessionParametersKHR(device, videoSessionParameters, pAllocator); + } +#endif +#if (defined(VK_KHR_video_queue)) + VkResult getVideoSessionMemoryRequirementsKHR(VkVideoSessionKHR videoSession, uint32_t* pMemoryRequirementsCount, VkVideoSessionMemoryRequirementsKHR* pMemoryRequirements) const noexcept { + return fp_vkGetVideoSessionMemoryRequirementsKHR(device, videoSession, pMemoryRequirementsCount, pMemoryRequirements); + } +#endif +#if (defined(VK_KHR_video_queue)) + VkResult bindVideoSessionMemoryKHR(VkVideoSessionKHR videoSession, uint32_t bindSessionMemoryInfoCount, const VkBindVideoSessionMemoryInfoKHR* pBindSessionMemoryInfos) const noexcept { + return fp_vkBindVideoSessionMemoryKHR(device, videoSession, bindSessionMemoryInfoCount, pBindSessionMemoryInfos); + } +#endif +#if (defined(VK_KHR_video_decode_queue)) + void cmdDecodeVideoKHR(VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR* pDecodeInfo) const noexcept { + fp_vkCmdDecodeVideoKHR(commandBuffer, pDecodeInfo); + } +#endif +#if (defined(VK_KHR_video_queue)) + void 
cmdBeginVideoCodingKHR(VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR* pBeginInfo) const noexcept { + fp_vkCmdBeginVideoCodingKHR(commandBuffer, pBeginInfo); + } +#endif +#if (defined(VK_KHR_video_queue)) + void cmdControlVideoCodingKHR(VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR* pCodingControlInfo) const noexcept { + fp_vkCmdControlVideoCodingKHR(commandBuffer, pCodingControlInfo); + } +#endif +#if (defined(VK_KHR_video_queue)) + void cmdEndVideoCodingKHR(VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR* pEndCodingInfo) const noexcept { + fp_vkCmdEndVideoCodingKHR(commandBuffer, pEndCodingInfo); + } +#endif +#if (defined(VK_KHR_video_encode_queue)) + void cmdEncodeVideoKHR(VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR* pEncodeInfo) const noexcept { + fp_vkCmdEncodeVideoKHR(commandBuffer, pEncodeInfo); + } +#endif +#if (defined(VK_NV_memory_decompression)) + void cmdDecompressMemoryNV(VkCommandBuffer commandBuffer, uint32_t decompressRegionCount, const VkDecompressMemoryRegionNV* pDecompressMemoryRegions) const noexcept { + fp_vkCmdDecompressMemoryNV(commandBuffer, decompressRegionCount, pDecompressMemoryRegions); + } +#endif +#if (defined(VK_NV_memory_decompression)) + void cmdDecompressMemoryIndirectCountNV(VkCommandBuffer commandBuffer, VkDeviceAddress indirectCommandsAddress, VkDeviceAddress indirectCommandsCountAddress, uint32_t stride) const noexcept { + fp_vkCmdDecompressMemoryIndirectCountNV(commandBuffer, indirectCommandsAddress, indirectCommandsCountAddress, stride); + } +#endif +#if (defined(VK_EXT_descriptor_buffer)) + void getDescriptorSetLayoutSizeEXT(VkDescriptorSetLayout layout, VkDeviceSize* pLayoutSizeInBytes) const noexcept { + fp_vkGetDescriptorSetLayoutSizeEXT(device, layout, pLayoutSizeInBytes); + } +#endif +#if (defined(VK_EXT_descriptor_buffer)) + void getDescriptorSetLayoutBindingOffsetEXT(VkDescriptorSetLayout layout, uint32_t binding, VkDeviceSize* pOffset) const noexcept { + fp_vkGetDescriptorSetLayoutBindingOffsetEXT(device, layout, binding, pOffset); + } +#endif +#if (defined(VK_EXT_descriptor_buffer)) + void getDescriptorEXT(const VkDescriptorGetInfoEXT* pDescriptorInfo, size_t dataSize, void* pDescriptor) const noexcept { + fp_vkGetDescriptorEXT(device, pDescriptorInfo, dataSize, pDescriptor); + } +#endif +#if (defined(VK_EXT_descriptor_buffer)) + void cmdBindDescriptorBuffersEXT(VkCommandBuffer commandBuffer, uint32_t bufferCount, const VkDescriptorBufferBindingInfoEXT* pBindingInfos) const noexcept { + fp_vkCmdBindDescriptorBuffersEXT(commandBuffer, bufferCount, pBindingInfos); + } +#endif +#if (defined(VK_EXT_descriptor_buffer)) + void cmdSetDescriptorBufferOffsetsEXT(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const uint32_t* pBufferIndices, const VkDeviceSize* pOffsets) const noexcept { + fp_vkCmdSetDescriptorBufferOffsetsEXT(commandBuffer, pipelineBindPoint, layout, firstSet, setCount, pBufferIndices, pOffsets); + } +#endif +#if (defined(VK_EXT_descriptor_buffer)) + void cmdBindDescriptorBufferEmbeddedSamplersEXT(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set) const noexcept { + fp_vkCmdBindDescriptorBufferEmbeddedSamplersEXT(commandBuffer, pipelineBindPoint, layout, set); + } +#endif +#if (defined(VK_EXT_descriptor_buffer)) + VkResult getBufferOpaqueCaptureDescriptorDataEXT(const VkBufferCaptureDescriptorDataInfoEXT* pInfo, void* pData) const 
noexcept { + return fp_vkGetBufferOpaqueCaptureDescriptorDataEXT(device, pInfo, pData); + } +#endif +#if (defined(VK_EXT_descriptor_buffer)) + VkResult getImageOpaqueCaptureDescriptorDataEXT(const VkImageCaptureDescriptorDataInfoEXT* pInfo, void* pData) const noexcept { + return fp_vkGetImageOpaqueCaptureDescriptorDataEXT(device, pInfo, pData); + } +#endif +#if (defined(VK_EXT_descriptor_buffer)) + VkResult getImageViewOpaqueCaptureDescriptorDataEXT(const VkImageViewCaptureDescriptorDataInfoEXT* pInfo, void* pData) const noexcept { + return fp_vkGetImageViewOpaqueCaptureDescriptorDataEXT(device, pInfo, pData); + } +#endif +#if (defined(VK_EXT_descriptor_buffer)) + VkResult getSamplerOpaqueCaptureDescriptorDataEXT(const VkSamplerCaptureDescriptorDataInfoEXT* pInfo, void* pData) const noexcept { + return fp_vkGetSamplerOpaqueCaptureDescriptorDataEXT(device, pInfo, pData); + } +#endif +#if (defined(VK_EXT_descriptor_buffer)) + VkResult getAccelerationStructureOpaqueCaptureDescriptorDataEXT(const VkAccelerationStructureCaptureDescriptorDataInfoEXT* pInfo, void* pData) const noexcept { + return fp_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT(device, pInfo, pData); + } +#endif +#if (defined(VK_EXT_pageable_device_local_memory)) + void setDeviceMemoryPriorityEXT(VkDeviceMemory memory, float priority) const noexcept { + fp_vkSetDeviceMemoryPriorityEXT(device, memory, priority); + } +#endif +#if (defined(VK_KHR_present_wait)) + VkResult waitForPresentKHR(VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout) const noexcept { + return fp_vkWaitForPresentKHR(device, swapchain, presentId, timeout); + } +#endif +#if (defined(VK_FUCHSIA_buffer_collection)) + VkResult createBufferCollectionFUCHSIA(const VkBufferCollectionCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferCollectionFUCHSIA* pCollection) const noexcept { + return fp_vkCreateBufferCollectionFUCHSIA(device, pCreateInfo, pAllocator, pCollection); + } +#endif +#if (defined(VK_FUCHSIA_buffer_collection)) + VkResult setBufferCollectionBufferConstraintsFUCHSIA(VkBufferCollectionFUCHSIA collection, const VkBufferConstraintsInfoFUCHSIA* pBufferConstraintsInfo) const noexcept { + return fp_vkSetBufferCollectionBufferConstraintsFUCHSIA(device, collection, pBufferConstraintsInfo); + } +#endif +#if (defined(VK_FUCHSIA_buffer_collection)) + VkResult setBufferCollectionImageConstraintsFUCHSIA(VkBufferCollectionFUCHSIA collection, const VkImageConstraintsInfoFUCHSIA* pImageConstraintsInfo) const noexcept { + return fp_vkSetBufferCollectionImageConstraintsFUCHSIA(device, collection, pImageConstraintsInfo); + } +#endif +#if (defined(VK_FUCHSIA_buffer_collection)) + void destroyBufferCollectionFUCHSIA(VkBufferCollectionFUCHSIA collection, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyBufferCollectionFUCHSIA(device, collection, pAllocator); + } +#endif +#if (defined(VK_FUCHSIA_buffer_collection)) + VkResult getBufferCollectionPropertiesFUCHSIA(VkBufferCollectionFUCHSIA collection, VkBufferCollectionPropertiesFUCHSIA* pProperties) const noexcept { + return fp_vkGetBufferCollectionPropertiesFUCHSIA(device, collection, pProperties); + } +#endif +#if (defined(VK_NV_cuda_kernel_launch)) + VkResult createCudaModuleNV(const VkCudaModuleCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCudaModuleNV* pModule) const noexcept { + return fp_vkCreateCudaModuleNV(device, pCreateInfo, pAllocator, pModule); + } +#endif +#if (defined(VK_NV_cuda_kernel_launch)) + VkResult 
getCudaModuleCacheNV(VkCudaModuleNV module, size_t* pCacheSize, void* pCacheData) const noexcept { + return fp_vkGetCudaModuleCacheNV(device, module, pCacheSize, pCacheData); + } +#endif +#if (defined(VK_NV_cuda_kernel_launch)) + VkResult createCudaFunctionNV(const VkCudaFunctionCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCudaFunctionNV* pFunction) const noexcept { + return fp_vkCreateCudaFunctionNV(device, pCreateInfo, pAllocator, pFunction); + } +#endif +#if (defined(VK_NV_cuda_kernel_launch)) + void destroyCudaModuleNV(VkCudaModuleNV module, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyCudaModuleNV(device, module, pAllocator); + } +#endif +#if (defined(VK_NV_cuda_kernel_launch)) + void destroyCudaFunctionNV(VkCudaFunctionNV function, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyCudaFunctionNV(device, function, pAllocator); + } +#endif +#if (defined(VK_NV_cuda_kernel_launch)) + void cmdCudaLaunchKernelNV(VkCommandBuffer commandBuffer, const VkCudaLaunchInfoNV* pLaunchInfo) const noexcept { + fp_vkCmdCudaLaunchKernelNV(commandBuffer, pLaunchInfo); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdBeginRendering(VkCommandBuffer commandBuffer, const VkRenderingInfoKHR* pRenderingInfo) const noexcept { + fp_vkCmdBeginRendering(commandBuffer, pRenderingInfo); + } +#endif +#if (defined(VK_VERSION_1_3)) + void cmdEndRendering(VkCommandBuffer commandBuffer) const noexcept { + fp_vkCmdEndRendering(commandBuffer); + } +#endif +#if (defined(VK_VALVE_descriptor_set_host_mapping)) + void getDescriptorSetLayoutHostMappingInfoVALVE(const VkDescriptorSetBindingReferenceVALVE* pBindingReference, VkDescriptorSetLayoutHostMappingInfoVALVE* pHostMapping) const noexcept { + fp_vkGetDescriptorSetLayoutHostMappingInfoVALVE(device, pBindingReference, pHostMapping); + } +#endif +#if (defined(VK_VALVE_descriptor_set_host_mapping)) + void getDescriptorSetHostMappingVALVE(VkDescriptorSet descriptorSet, void** ppData) const noexcept { + fp_vkGetDescriptorSetHostMappingVALVE(device, descriptorSet, ppData); + } +#endif +#if (defined(VK_EXT_opacity_micromap)) + VkResult createMicromapEXT(const VkMicromapCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkMicromapEXT* pMicromap) const noexcept { + return fp_vkCreateMicromapEXT(device, pCreateInfo, pAllocator, pMicromap); + } +#endif +#if (defined(VK_EXT_opacity_micromap)) + void cmdBuildMicromapsEXT(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos) const noexcept { + fp_vkCmdBuildMicromapsEXT(commandBuffer, infoCount, pInfos); + } +#endif +#if (defined(VK_EXT_opacity_micromap)) + VkResult buildMicromapsEXT(VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos) const noexcept { + return fp_vkBuildMicromapsEXT(device, deferredOperation, infoCount, pInfos); + } +#endif +#if (defined(VK_EXT_opacity_micromap)) + void destroyMicromapEXT(VkMicromapEXT micromap, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyMicromapEXT(device, micromap, pAllocator); + } +#endif +#if (defined(VK_EXT_opacity_micromap)) + void cmdCopyMicromapEXT(VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT* pInfo) const noexcept { + fp_vkCmdCopyMicromapEXT(commandBuffer, pInfo); + } +#endif +#if (defined(VK_EXT_opacity_micromap)) + VkResult copyMicromapEXT(VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT* pInfo) const noexcept { + return fp_vkCopyMicromapEXT(device, 
deferredOperation, pInfo); + } +#endif +#if (defined(VK_EXT_opacity_micromap)) + void cmdCopyMicromapToMemoryEXT(VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT* pInfo) const noexcept { + fp_vkCmdCopyMicromapToMemoryEXT(commandBuffer, pInfo); + } +#endif +#if (defined(VK_EXT_opacity_micromap)) + VkResult copyMicromapToMemoryEXT(VkDeferredOperationKHR deferredOperation, const VkCopyMicromapToMemoryInfoEXT* pInfo) const noexcept { + return fp_vkCopyMicromapToMemoryEXT(device, deferredOperation, pInfo); + } +#endif +#if (defined(VK_EXT_opacity_micromap)) + void cmdCopyMemoryToMicromapEXT(VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT* pInfo) const noexcept { + fp_vkCmdCopyMemoryToMicromapEXT(commandBuffer, pInfo); + } +#endif +#if (defined(VK_EXT_opacity_micromap)) + VkResult copyMemoryToMicromapEXT(VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToMicromapInfoEXT* pInfo) const noexcept { + return fp_vkCopyMemoryToMicromapEXT(device, deferredOperation, pInfo); + } +#endif +#if (defined(VK_EXT_opacity_micromap)) + void cmdWriteMicromapsPropertiesEXT(VkCommandBuffer commandBuffer, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) const noexcept { + fp_vkCmdWriteMicromapsPropertiesEXT(commandBuffer, micromapCount, pMicromaps, queryType, queryPool, firstQuery); + } +#endif +#if (defined(VK_EXT_opacity_micromap)) + VkResult writeMicromapsPropertiesEXT(uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, size_t dataSize, void* pData, size_t stride) const noexcept { + return fp_vkWriteMicromapsPropertiesEXT(device, micromapCount, pMicromaps, queryType, dataSize, pData, stride); + } +#endif +#if (defined(VK_EXT_opacity_micromap)) + void getDeviceMicromapCompatibilityEXT(const VkMicromapVersionInfoEXT* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility) const noexcept { + fp_vkGetDeviceMicromapCompatibilityEXT(device, pVersionInfo, pCompatibility); + } +#endif +#if (defined(VK_EXT_opacity_micromap)) + void getMicromapBuildSizesEXT(VkAccelerationStructureBuildTypeKHR buildType, const VkMicromapBuildInfoEXT* pBuildInfo, VkMicromapBuildSizesInfoEXT* pSizeInfo) const noexcept { + fp_vkGetMicromapBuildSizesEXT(device, buildType, pBuildInfo, pSizeInfo); + } +#endif +#if (defined(VK_EXT_shader_module_identifier)) + void getShaderModuleIdentifierEXT(VkShaderModule shaderModule, VkShaderModuleIdentifierEXT* pIdentifier) const noexcept { + fp_vkGetShaderModuleIdentifierEXT(device, shaderModule, pIdentifier); + } +#endif +#if (defined(VK_EXT_shader_module_identifier)) + void getShaderModuleCreateInfoIdentifierEXT(const VkShaderModuleCreateInfo* pCreateInfo, VkShaderModuleIdentifierEXT* pIdentifier) const noexcept { + fp_vkGetShaderModuleCreateInfoIdentifierEXT(device, pCreateInfo, pIdentifier); + } +#endif +#if (defined(VK_KHR_maintenance5)) + void getImageSubresourceLayout2KHR(VkImage image, const VkImageSubresource2EXT* pSubresource, VkSubresourceLayout2EXT* pLayout) const noexcept { + fp_vkGetImageSubresourceLayout2KHR(device, image, pSubresource, pLayout); + } +#endif +#if (defined(VK_EXT_pipeline_properties)) + VkResult getPipelinePropertiesEXT(const VkPipelineInfoEXT* pPipelineInfo, VkBaseOutStructure* pPipelineProperties) const noexcept { + return fp_vkGetPipelinePropertiesEXT(device, pPipelineInfo, pPipelineProperties); + } +#endif +#if (defined(VK_EXT_metal_objects)) + void exportMetalObjectsEXT(VkExportMetalObjectsInfoEXT* 
pMetalObjectsInfo) const noexcept { + fp_vkExportMetalObjectsEXT(device, pMetalObjectsInfo); + } +#endif +#if (defined(VK_QCOM_tile_properties)) + VkResult getFramebufferTilePropertiesQCOM(VkFramebuffer framebuffer, uint32_t* pPropertiesCount, VkTilePropertiesQCOM* pProperties) const noexcept { + return fp_vkGetFramebufferTilePropertiesQCOM(device, framebuffer, pPropertiesCount, pProperties); + } +#endif +#if (defined(VK_QCOM_tile_properties)) + VkResult getDynamicRenderingTilePropertiesQCOM(const VkRenderingInfoKHR* pRenderingInfo, VkTilePropertiesQCOM* pProperties) const noexcept { + return fp_vkGetDynamicRenderingTilePropertiesQCOM(device, pRenderingInfo, pProperties); + } +#endif +#if (defined(VK_NV_optical_flow)) + VkResult createOpticalFlowSessionNV(const VkOpticalFlowSessionCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkOpticalFlowSessionNV* pSession) const noexcept { + return fp_vkCreateOpticalFlowSessionNV(device, pCreateInfo, pAllocator, pSession); + } +#endif +#if (defined(VK_NV_optical_flow)) + void destroyOpticalFlowSessionNV(VkOpticalFlowSessionNV session, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyOpticalFlowSessionNV(device, session, pAllocator); + } +#endif +#if (defined(VK_NV_optical_flow)) + VkResult bindOpticalFlowSessionImageNV(VkOpticalFlowSessionNV session, VkOpticalFlowSessionBindingPointNV bindingPoint, VkImageView view, VkImageLayout layout) const noexcept { + return fp_vkBindOpticalFlowSessionImageNV(device, session, bindingPoint, view, layout); + } +#endif +#if (defined(VK_NV_optical_flow)) + void cmdOpticalFlowExecuteNV(VkCommandBuffer commandBuffer, VkOpticalFlowSessionNV session, const VkOpticalFlowExecuteInfoNV* pExecuteInfo) const noexcept { + fp_vkCmdOpticalFlowExecuteNV(commandBuffer, session, pExecuteInfo); + } +#endif +#if (defined(VK_EXT_device_fault)) + VkResult getDeviceFaultInfoEXT(VkDeviceFaultCountsEXT* pFaultCounts, VkDeviceFaultInfoEXT* pFaultInfo) const noexcept { + return fp_vkGetDeviceFaultInfoEXT(device, pFaultCounts, pFaultInfo); + } +#endif +#if (defined(VK_EXT_depth_bias_control)) + void cmdSetDepthBias2EXT(VkCommandBuffer commandBuffer, const VkDepthBiasInfoEXT* pDepthBiasInfo) const noexcept { + fp_vkCmdSetDepthBias2EXT(commandBuffer, pDepthBiasInfo); + } +#endif +#if (defined(VK_EXT_swapchain_maintenance1)) + VkResult releaseSwapchainImagesEXT(const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo) const noexcept { + return fp_vkReleaseSwapchainImagesEXT(device, pReleaseInfo); + } +#endif +#if (defined(VK_KHR_maintenance5)) + void getDeviceImageSubresourceLayoutKHR(const VkDeviceImageSubresourceInfoKHR* pInfo, VkSubresourceLayout2EXT* pLayout) const noexcept { + fp_vkGetDeviceImageSubresourceLayoutKHR(device, pInfo, pLayout); + } +#endif +#if (defined(VK_KHR_map_memory2)) + VkResult mapMemory2KHR(const VkMemoryMapInfoKHR* pMemoryMapInfo, void** ppData) const noexcept { + return fp_vkMapMemory2KHR(device, pMemoryMapInfo, ppData); + } +#endif +#if (defined(VK_KHR_map_memory2)) + VkResult unmapMemory2KHR(const VkMemoryUnmapInfoKHR* pMemoryUnmapInfo) const noexcept { + return fp_vkUnmapMemory2KHR(device, pMemoryUnmapInfo); + } +#endif +#if (defined(VK_EXT_shader_object)) + VkResult createShadersEXT(uint32_t createInfoCount, const VkShaderCreateInfoEXT* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkShaderEXT* pShaders) const noexcept { + return fp_vkCreateShadersEXT(device, createInfoCount, pCreateInfos, pAllocator, pShaders); + } +#endif +#if (defined(VK_EXT_shader_object)) + void 
destroyShaderEXT(VkShaderEXT shader, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyShaderEXT(device, shader, pAllocator); + } +#endif +#if (defined(VK_EXT_shader_object)) + VkResult getShaderBinaryDataEXT(VkShaderEXT shader, size_t* pDataSize, void* pData) const noexcept { + return fp_vkGetShaderBinaryDataEXT(device, shader, pDataSize, pData); + } +#endif +#if (defined(VK_EXT_shader_object)) + void cmdBindShadersEXT(VkCommandBuffer commandBuffer, uint32_t stageCount, const VkShaderStageFlagBits* pStages, const VkShaderEXT* pShaders) const noexcept { + fp_vkCmdBindShadersEXT(commandBuffer, stageCount, pStages, pShaders); + } +#endif +#if (defined(VK_QNX_external_memory_screen_buffer)) + VkResult getScreenBufferPropertiesQNX(const struct _screen_buffer* buffer, VkScreenBufferPropertiesQNX* pProperties) const noexcept { + return fp_vkGetScreenBufferPropertiesQNX(device, buffer, pProperties); + } +#endif +#if (defined(VK_AMDX_shader_enqueue)) + VkResult getExecutionGraphPipelineScratchSizeAMDX(VkPipeline executionGraph, VkExecutionGraphPipelineScratchSizeAMDX* pSizeInfo) const noexcept { + return fp_vkGetExecutionGraphPipelineScratchSizeAMDX(device, executionGraph, pSizeInfo); + } +#endif +#if (defined(VK_AMDX_shader_enqueue)) + VkResult getExecutionGraphPipelineNodeIndexAMDX(VkPipeline executionGraph, const VkPipelineShaderStageNodeCreateInfoAMDX* pNodeInfo, uint32_t* pNodeIndex) const noexcept { + return fp_vkGetExecutionGraphPipelineNodeIndexAMDX(device, executionGraph, pNodeInfo, pNodeIndex); + } +#endif +#if (defined(VK_AMDX_shader_enqueue)) + VkResult createExecutionGraphPipelinesAMDX(VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkExecutionGraphPipelineCreateInfoAMDX* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) const noexcept { + return fp_vkCreateExecutionGraphPipelinesAMDX(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); + } +#endif +#if (defined(VK_AMDX_shader_enqueue)) + void cmdInitializeGraphScratchMemoryAMDX(VkCommandBuffer commandBuffer, VkPipeline executionGraph, VkDeviceAddress scratch, VkDeviceSize scratchSize) const noexcept { + fp_vkCmdInitializeGraphScratchMemoryAMDX(commandBuffer, executionGraph, scratch, scratchSize); + } +#endif +#if (defined(VK_AMDX_shader_enqueue)) + void cmdDispatchGraphAMDX(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceSize scratchSize, const VkDispatchGraphCountInfoAMDX* pCountInfo) const noexcept { + fp_vkCmdDispatchGraphAMDX(commandBuffer, scratch, scratchSize, pCountInfo); + } +#endif +#if (defined(VK_AMDX_shader_enqueue)) + void cmdDispatchGraphIndirectAMDX(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceSize scratchSize, const VkDispatchGraphCountInfoAMDX* pCountInfo) const noexcept { + fp_vkCmdDispatchGraphIndirectAMDX(commandBuffer, scratch, scratchSize, pCountInfo); + } +#endif +#if (defined(VK_AMDX_shader_enqueue)) + void cmdDispatchGraphIndirectCountAMDX(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceSize scratchSize, VkDeviceAddress countInfo) const noexcept { + fp_vkCmdDispatchGraphIndirectCountAMDX(commandBuffer, scratch, scratchSize, countInfo); + } +#endif +#if (defined(VK_KHR_maintenance6)) + void cmdBindDescriptorSets2KHR(VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfoKHR* pBindDescriptorSetsInfo) const noexcept { + fp_vkCmdBindDescriptorSets2KHR(commandBuffer, pBindDescriptorSetsInfo); + } +#endif +#if (defined(VK_KHR_maintenance6)) + void 
cmdPushConstants2KHR(VkCommandBuffer commandBuffer, const VkPushConstantsInfoKHR* pPushConstantsInfo) const noexcept { + fp_vkCmdPushConstants2KHR(commandBuffer, pPushConstantsInfo); + } +#endif +#if (defined(VK_KHR_maintenance6)) + void cmdPushDescriptorSet2KHR(VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfoKHR* pPushDescriptorSetInfo) const noexcept { + fp_vkCmdPushDescriptorSet2KHR(commandBuffer, pPushDescriptorSetInfo); + } +#endif +#if (defined(VK_KHR_maintenance6)) + void cmdPushDescriptorSetWithTemplate2KHR(VkCommandBuffer commandBuffer, const VkPushDescriptorSetWithTemplateInfoKHR* pPushDescriptorSetWithTemplateInfo) const noexcept { + fp_vkCmdPushDescriptorSetWithTemplate2KHR(commandBuffer, pPushDescriptorSetWithTemplateInfo); + } +#endif +#if (defined(VK_KHR_maintenance6)) + void cmdSetDescriptorBufferOffsets2EXT(VkCommandBuffer commandBuffer, const VkSetDescriptorBufferOffsetsInfoEXT* pSetDescriptorBufferOffsetsInfo) const noexcept { + fp_vkCmdSetDescriptorBufferOffsets2EXT(commandBuffer, pSetDescriptorBufferOffsetsInfo); + } +#endif +#if (defined(VK_KHR_maintenance6)) + void cmdBindDescriptorBufferEmbeddedSamplers2EXT(VkCommandBuffer commandBuffer, const VkBindDescriptorBufferEmbeddedSamplersInfoEXT* pBindDescriptorBufferEmbeddedSamplersInfo) const noexcept { + fp_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT(commandBuffer, pBindDescriptorBufferEmbeddedSamplersInfo); + } +#endif +#if (defined(VK_NV_low_latency2)) + VkResult setLatencySleepModeNV(VkSwapchainKHR swapchain, const VkLatencySleepModeInfoNV* pSleepModeInfo) const noexcept { + return fp_vkSetLatencySleepModeNV(device, swapchain, pSleepModeInfo); + } +#endif +#if (defined(VK_NV_low_latency2)) + VkResult latencySleepNV(VkSwapchainKHR swapchain, const VkLatencySleepInfoNV* pSleepInfo) const noexcept { + return fp_vkLatencySleepNV(device, swapchain, pSleepInfo); + } +#endif +#if (defined(VK_NV_low_latency2)) + void setLatencyMarkerNV(VkSwapchainKHR swapchain, const VkSetLatencyMarkerInfoNV* pLatencyMarkerInfo) const noexcept { + fp_vkSetLatencyMarkerNV(device, swapchain, pLatencyMarkerInfo); + } +#endif +#if ((defined(VK_NV_low_latency2))) && VK_HEADER_VERSION >= 271 + void getLatencyTimingsNV(VkSwapchainKHR swapchain, VkGetLatencyMarkerInfoNV* pLatencyMarkerInfo) const noexcept { + fp_vkGetLatencyTimingsNV(device, swapchain, pLatencyMarkerInfo); + } +#endif +#if (defined(VK_NV_low_latency2)) + void queueNotifyOutOfBandNV(VkQueue queue, const VkOutOfBandQueueTypeInfoNV* pQueueTypeInfo) const noexcept { + fp_vkQueueNotifyOutOfBandNV(queue, pQueueTypeInfo); + } +#endif +#if (defined(VK_KHR_dynamic_rendering_local_read)) + void cmdSetRenderingAttachmentLocationsKHR(VkCommandBuffer commandBuffer, const VkRenderingAttachmentLocationInfoKHR* pLocationInfo) const noexcept { + fp_vkCmdSetRenderingAttachmentLocationsKHR(commandBuffer, pLocationInfo); + } +#endif +#if (defined(VK_KHR_dynamic_rendering_local_read)) + void cmdSetRenderingInputAttachmentIndicesKHR(VkCommandBuffer commandBuffer, const VkRenderingInputAttachmentIndexInfoKHR* pInputAttachmentIndexInfo) const noexcept { + fp_vkCmdSetRenderingInputAttachmentIndicesKHR(commandBuffer, pInputAttachmentIndexInfo); + } +#endif +#if (defined(VK_EXT_shader_object)) || (defined(VK_EXT_depth_clamp_control)) + void cmdSetDepthClampRangeEXT(VkCommandBuffer commandBuffer, VkDepthClampModeEXT depthClampMode, const VkDepthClampRangeEXT* pDepthClampRange) const noexcept { + fp_vkCmdSetDepthClampRangeEXT(commandBuffer, depthClampMode, pDepthClampRange); + } +#endif +#if 
(defined(VK_EXT_host_query_reset)) + void resetQueryPoolEXT(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) const noexcept { + fp_vkResetQueryPoolEXT(device, queryPool, firstQuery, queryCount); + } +#endif +#if (defined(VK_KHR_maintenance1)) + void trimCommandPoolKHR(VkCommandPool commandPool, VkCommandPoolTrimFlagsKHR flags) const noexcept { + fp_vkTrimCommandPoolKHR(device, commandPool, flags); + } +#endif +#if (defined(VK_KHR_device_group)) + void getDeviceGroupPeerMemoryFeaturesKHR(uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlagsKHR* pPeerMemoryFeatures) const noexcept { + fp_vkGetDeviceGroupPeerMemoryFeaturesKHR(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures); + } +#endif +#if (defined(VK_KHR_bind_memory2)) + VkResult bindBufferMemory2KHR(uint32_t bindInfoCount, const VkBindBufferMemoryInfoKHR* pBindInfos) const noexcept { + return fp_vkBindBufferMemory2KHR(device, bindInfoCount, pBindInfos); + } +#endif +#if (defined(VK_KHR_bind_memory2)) + VkResult bindImageMemory2KHR(uint32_t bindInfoCount, const VkBindImageMemoryInfoKHR* pBindInfos) const noexcept { + return fp_vkBindImageMemory2KHR(device, bindInfoCount, pBindInfos); + } +#endif +#if (defined(VK_KHR_device_group)) + void cmdSetDeviceMaskKHR(VkCommandBuffer commandBuffer, uint32_t deviceMask) const noexcept { + fp_vkCmdSetDeviceMaskKHR(commandBuffer, deviceMask); + } +#endif +#if (defined(VK_KHR_device_group)) + void cmdDispatchBaseKHR(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) const noexcept { + fp_vkCmdDispatchBaseKHR(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ); + } +#endif +#if (defined(VK_KHR_descriptor_update_template)) + VkResult createDescriptorUpdateTemplateKHR(const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate) const noexcept { + return fp_vkCreateDescriptorUpdateTemplateKHR(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate); + } +#endif +#if (defined(VK_KHR_descriptor_update_template)) + void destroyDescriptorUpdateTemplateKHR(VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator); + } +#endif +#if (defined(VK_KHR_descriptor_update_template)) + void updateDescriptorSetWithTemplateKHR(VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void* pData) const noexcept { + fp_vkUpdateDescriptorSetWithTemplateKHR(device, descriptorSet, descriptorUpdateTemplate, pData); + } +#endif +#if (defined(VK_KHR_get_memory_requirements2)) + void getBufferMemoryRequirements2KHR(const VkBufferMemoryRequirementsInfo2KHR* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + fp_vkGetBufferMemoryRequirements2KHR(device, pInfo, pMemoryRequirements); + } +#endif +#if (defined(VK_KHR_get_memory_requirements2)) + void getImageMemoryRequirements2KHR(const VkImageMemoryRequirementsInfo2KHR* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + fp_vkGetImageMemoryRequirements2KHR(device, pInfo, pMemoryRequirements); + } +#endif +#if (defined(VK_KHR_get_memory_requirements2)) + void getImageSparseMemoryRequirements2KHR(const 
VkImageSparseMemoryRequirementsInfo2KHR* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements) const noexcept { + fp_vkGetImageSparseMemoryRequirements2KHR(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements); + } +#endif +#if (defined(VK_KHR_maintenance4)) + void getDeviceBufferMemoryRequirementsKHR(const VkDeviceBufferMemoryRequirementsKHR* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + fp_vkGetDeviceBufferMemoryRequirementsKHR(device, pInfo, pMemoryRequirements); + } +#endif +#if (defined(VK_KHR_maintenance4)) + void getDeviceImageMemoryRequirementsKHR(const VkDeviceImageMemoryRequirementsKHR* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + fp_vkGetDeviceImageMemoryRequirementsKHR(device, pInfo, pMemoryRequirements); + } +#endif +#if (defined(VK_KHR_maintenance4)) + void getDeviceImageSparseMemoryRequirementsKHR(const VkDeviceImageMemoryRequirementsKHR* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements) const noexcept { + fp_vkGetDeviceImageSparseMemoryRequirementsKHR(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements); + } +#endif +#if (defined(VK_KHR_sampler_ycbcr_conversion)) + VkResult createSamplerYcbcrConversionKHR(const VkSamplerYcbcrConversionCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversionKHR* pYcbcrConversion) const noexcept { + return fp_vkCreateSamplerYcbcrConversionKHR(device, pCreateInfo, pAllocator, pYcbcrConversion); + } +#endif +#if (defined(VK_KHR_sampler_ycbcr_conversion)) + void destroySamplerYcbcrConversionKHR(VkSamplerYcbcrConversionKHR ycbcrConversion, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroySamplerYcbcrConversionKHR(device, ycbcrConversion, pAllocator); + } +#endif +#if (defined(VK_KHR_maintenance3)) + void getDescriptorSetLayoutSupportKHR(const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupportKHR* pSupport) const noexcept { + fp_vkGetDescriptorSetLayoutSupportKHR(device, pCreateInfo, pSupport); + } +#endif +#if (defined(VK_EXT_calibrated_timestamps)) + VkResult getCalibratedTimestampsEXT(uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation) const noexcept { + return fp_vkGetCalibratedTimestampsEXT(device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation); + } +#endif +#if (defined(VK_KHR_create_renderpass2)) + VkResult createRenderPass2KHR(const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) const noexcept { + return fp_vkCreateRenderPass2KHR(device, pCreateInfo, pAllocator, pRenderPass); + } +#endif +#if (defined(VK_KHR_create_renderpass2)) + void cmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfoKHR* pSubpassBeginInfo) const noexcept { + fp_vkCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo); + } +#endif +#if (defined(VK_KHR_create_renderpass2)) + void cmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR* pSubpassBeginInfo, const VkSubpassEndInfoKHR* pSubpassEndInfo) const noexcept { + fp_vkCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo); + } +#endif +#if (defined(VK_KHR_create_renderpass2)) + void cmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const 
VkSubpassEndInfoKHR* pSubpassEndInfo) const noexcept { + fp_vkCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo); + } +#endif +#if (defined(VK_KHR_timeline_semaphore)) + VkResult getSemaphoreCounterValueKHR(VkSemaphore semaphore, uint64_t* pValue) const noexcept { + return fp_vkGetSemaphoreCounterValueKHR(device, semaphore, pValue); + } +#endif +#if (defined(VK_KHR_timeline_semaphore)) + VkResult waitSemaphoresKHR(const VkSemaphoreWaitInfoKHR* pWaitInfo, uint64_t timeout) const noexcept { + return fp_vkWaitSemaphoresKHR(device, pWaitInfo, timeout); + } +#endif +#if (defined(VK_KHR_timeline_semaphore)) + VkResult signalSemaphoreKHR(const VkSemaphoreSignalInfoKHR* pSignalInfo) const noexcept { + return fp_vkSignalSemaphoreKHR(device, pSignalInfo); + } +#endif +#if (defined(VK_AMD_draw_indirect_count)) + void cmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) const noexcept { + fp_vkCmdDrawIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); + } +#endif +#if (defined(VK_AMD_draw_indirect_count)) + void cmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) const noexcept { + fp_vkCmdDrawIndexedIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); + } +#endif +#if (defined(VK_NV_ray_tracing)) + VkResult getRayTracingShaderGroupHandlesNV(VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData) const noexcept { + return fp_vkGetRayTracingShaderGroupHandlesNV(device, pipeline, firstGroup, groupCount, dataSize, pData); + } +#endif +#if (defined(VK_KHR_buffer_device_address)) + uint64_t getBufferOpaqueCaptureAddressKHR(const VkBufferDeviceAddressInfoEXT* pInfo) const noexcept { + return fp_vkGetBufferOpaqueCaptureAddressKHR(device, pInfo); + } +#endif +#if (defined(VK_EXT_buffer_device_address)) + VkDeviceAddress getBufferDeviceAddressEXT(const VkBufferDeviceAddressInfoEXT* pInfo) const noexcept { + return fp_vkGetBufferDeviceAddressEXT(device, pInfo); + } +#endif +#if (defined(VK_KHR_buffer_device_address)) + uint64_t getDeviceMemoryOpaqueCaptureAddressKHR(const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo) const noexcept { + return fp_vkGetDeviceMemoryOpaqueCaptureAddressKHR(device, pInfo); + } +#endif +#if (defined(VK_EXT_line_rasterization)) + void cmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern) const noexcept { + fp_vkCmdSetLineStippleEXT(commandBuffer, lineStippleFactor, lineStipplePattern); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + void cmdSetCullModeEXT(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode) const noexcept { + fp_vkCmdSetCullModeEXT(commandBuffer, cullMode); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + void cmdSetFrontFaceEXT(VkCommandBuffer commandBuffer, VkFrontFace frontFace) const noexcept { + fp_vkCmdSetFrontFaceEXT(commandBuffer, frontFace); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + void cmdSetPrimitiveTopologyEXT(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology) const noexcept { + 
fp_vkCmdSetPrimitiveTopologyEXT(commandBuffer, primitiveTopology); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + void cmdSetViewportWithCountEXT(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports) const noexcept { + fp_vkCmdSetViewportWithCountEXT(commandBuffer, viewportCount, pViewports); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + void cmdSetScissorWithCountEXT(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors) const noexcept { + fp_vkCmdSetScissorWithCountEXT(commandBuffer, scissorCount, pScissors); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + void cmdBindVertexBuffers2EXT(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides) const noexcept { + fp_vkCmdBindVertexBuffers2EXT(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + void cmdSetDepthTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable) const noexcept { + fp_vkCmdSetDepthTestEnableEXT(commandBuffer, depthTestEnable); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + void cmdSetDepthWriteEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable) const noexcept { + fp_vkCmdSetDepthWriteEnableEXT(commandBuffer, depthWriteEnable); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + void cmdSetDepthCompareOpEXT(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp) const noexcept { + fp_vkCmdSetDepthCompareOpEXT(commandBuffer, depthCompareOp); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + void cmdSetDepthBoundsTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable) const noexcept { + fp_vkCmdSetDepthBoundsTestEnableEXT(commandBuffer, depthBoundsTestEnable); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + void cmdSetStencilTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable) const noexcept { + fp_vkCmdSetStencilTestEnableEXT(commandBuffer, stencilTestEnable); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + void cmdSetStencilOpEXT(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp) const noexcept { + fp_vkCmdSetStencilOpEXT(commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) + void cmdSetRasterizerDiscardEnableEXT(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable) const noexcept { + fp_vkCmdSetRasterizerDiscardEnableEXT(commandBuffer, rasterizerDiscardEnable); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) + void cmdSetDepthBiasEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable) const noexcept { + fp_vkCmdSetDepthBiasEnableEXT(commandBuffer, depthBiasEnable); + } +#endif +#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) + void 
cmdSetPrimitiveRestartEnableEXT(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable) const noexcept { + fp_vkCmdSetPrimitiveRestartEnableEXT(commandBuffer, primitiveRestartEnable); + } +#endif +#if (defined(VK_EXT_private_data)) + VkResult createPrivateDataSlotEXT(const VkPrivateDataSlotCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlotEXT* pPrivateDataSlot) const noexcept { + return fp_vkCreatePrivateDataSlotEXT(device, pCreateInfo, pAllocator, pPrivateDataSlot); + } +#endif +#if (defined(VK_EXT_private_data)) + void destroyPrivateDataSlotEXT(VkPrivateDataSlotEXT privateDataSlot, const VkAllocationCallbacks* pAllocator) const noexcept { + fp_vkDestroyPrivateDataSlotEXT(device, privateDataSlot, pAllocator); + } +#endif +#if (defined(VK_EXT_private_data)) + VkResult setPrivateDataEXT(VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t data) const noexcept { + return fp_vkSetPrivateDataEXT(device, objectType, objectHandle, privateDataSlot, data); + } +#endif +#if (defined(VK_EXT_private_data)) + void getPrivateDataEXT(VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t* pData) const noexcept { + fp_vkGetPrivateDataEXT(device, objectType, objectHandle, privateDataSlot, pData); + } +#endif +#if (defined(VK_KHR_copy_commands2)) + void cmdCopyBuffer2KHR(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR* pCopyBufferInfo) const noexcept { + fp_vkCmdCopyBuffer2KHR(commandBuffer, pCopyBufferInfo); + } +#endif +#if (defined(VK_KHR_copy_commands2)) + void cmdCopyImage2KHR(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR* pCopyImageInfo) const noexcept { + fp_vkCmdCopyImage2KHR(commandBuffer, pCopyImageInfo); + } +#endif +#if (defined(VK_KHR_copy_commands2)) + void cmdBlitImage2KHR(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR* pBlitImageInfo) const noexcept { + fp_vkCmdBlitImage2KHR(commandBuffer, pBlitImageInfo); + } +#endif +#if (defined(VK_KHR_copy_commands2)) + void cmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo) const noexcept { + fp_vkCmdCopyBufferToImage2KHR(commandBuffer, pCopyBufferToImageInfo); + } +#endif +#if (defined(VK_KHR_copy_commands2)) + void cmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo) const noexcept { + fp_vkCmdCopyImageToBuffer2KHR(commandBuffer, pCopyImageToBufferInfo); + } +#endif +#if (defined(VK_KHR_copy_commands2)) + void cmdResolveImage2KHR(VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR* pResolveImageInfo) const noexcept { + fp_vkCmdResolveImage2KHR(commandBuffer, pResolveImageInfo); + } +#endif +#if (defined(VK_KHR_synchronization2)) + void cmdSetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfoKHR* pDependencyInfo) const noexcept { + fp_vkCmdSetEvent2KHR(commandBuffer, event, pDependencyInfo); + } +#endif +#if (defined(VK_KHR_synchronization2)) + void cmdResetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2KHR stageMask) const noexcept { + fp_vkCmdResetEvent2KHR(commandBuffer, event, stageMask); + } +#endif +#if (defined(VK_KHR_synchronization2)) + void cmdWaitEvents2KHR(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfoKHR* pDependencyInfos) const noexcept { + fp_vkCmdWaitEvents2KHR(commandBuffer, eventCount, pEvents, pDependencyInfos); + } +#endif +#if 
(defined(VK_KHR_synchronization2)) + void cmdPipelineBarrier2KHR(VkCommandBuffer commandBuffer, const VkDependencyInfoKHR* pDependencyInfo) const noexcept { + fp_vkCmdPipelineBarrier2KHR(commandBuffer, pDependencyInfo); + } +#endif +#if (defined(VK_KHR_synchronization2)) + VkResult queueSubmit2KHR(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR* pSubmits, VkFence fence) const noexcept { + return fp_vkQueueSubmit2KHR(queue, submitCount, pSubmits, fence); + } +#endif +#if (defined(VK_KHR_synchronization2)) + void cmdWriteTimestamp2KHR(VkCommandBuffer commandBuffer, VkPipelineStageFlags2KHR stage, VkQueryPool queryPool, uint32_t query) const noexcept { + fp_vkCmdWriteTimestamp2KHR(commandBuffer, stage, queryPool, query); + } +#endif +#if (defined(VK_KHR_dynamic_rendering)) + void cmdBeginRenderingKHR(VkCommandBuffer commandBuffer, const VkRenderingInfoKHR* pRenderingInfo) const noexcept { + fp_vkCmdBeginRenderingKHR(commandBuffer, pRenderingInfo); + } +#endif +#if (defined(VK_KHR_dynamic_rendering)) + void cmdEndRenderingKHR(VkCommandBuffer commandBuffer) const noexcept { + fp_vkCmdEndRenderingKHR(commandBuffer); + } +#endif +#if (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) + void getImageSubresourceLayout2EXT(VkImage image, const VkImageSubresource2EXT* pSubresource, VkSubresourceLayout2EXT* pLayout) const noexcept { + fp_vkGetImageSubresourceLayout2EXT(device, image, pSubresource, pLayout); + } +#endif + PFN_vkGetDeviceQueue fp_vkGetDeviceQueue = nullptr; + PFN_vkQueueSubmit fp_vkQueueSubmit = nullptr; + PFN_vkQueueWaitIdle fp_vkQueueWaitIdle = nullptr; + PFN_vkDeviceWaitIdle fp_vkDeviceWaitIdle = nullptr; + PFN_vkAllocateMemory fp_vkAllocateMemory = nullptr; + PFN_vkFreeMemory fp_vkFreeMemory = nullptr; + PFN_vkMapMemory fp_vkMapMemory = nullptr; + PFN_vkUnmapMemory fp_vkUnmapMemory = nullptr; + PFN_vkFlushMappedMemoryRanges fp_vkFlushMappedMemoryRanges = nullptr; + PFN_vkInvalidateMappedMemoryRanges fp_vkInvalidateMappedMemoryRanges = nullptr; + PFN_vkGetDeviceMemoryCommitment fp_vkGetDeviceMemoryCommitment = nullptr; + PFN_vkGetBufferMemoryRequirements fp_vkGetBufferMemoryRequirements = nullptr; + PFN_vkBindBufferMemory fp_vkBindBufferMemory = nullptr; + PFN_vkGetImageMemoryRequirements fp_vkGetImageMemoryRequirements = nullptr; + PFN_vkBindImageMemory fp_vkBindImageMemory = nullptr; + PFN_vkGetImageSparseMemoryRequirements fp_vkGetImageSparseMemoryRequirements = nullptr; + PFN_vkQueueBindSparse fp_vkQueueBindSparse = nullptr; + PFN_vkCreateFence fp_vkCreateFence = nullptr; + PFN_vkDestroyFence fp_vkDestroyFence = nullptr; + PFN_vkResetFences fp_vkResetFences = nullptr; + PFN_vkGetFenceStatus fp_vkGetFenceStatus = nullptr; + PFN_vkWaitForFences fp_vkWaitForFences = nullptr; + PFN_vkCreateSemaphore fp_vkCreateSemaphore = nullptr; + PFN_vkDestroySemaphore fp_vkDestroySemaphore = nullptr; + PFN_vkCreateEvent fp_vkCreateEvent = nullptr; + PFN_vkDestroyEvent fp_vkDestroyEvent = nullptr; + PFN_vkGetEventStatus fp_vkGetEventStatus = nullptr; + PFN_vkSetEvent fp_vkSetEvent = nullptr; + PFN_vkResetEvent fp_vkResetEvent = nullptr; + PFN_vkCreateQueryPool fp_vkCreateQueryPool = nullptr; + PFN_vkDestroyQueryPool fp_vkDestroyQueryPool = nullptr; + PFN_vkGetQueryPoolResults fp_vkGetQueryPoolResults = nullptr; +#if (defined(VK_VERSION_1_2)) + PFN_vkResetQueryPool fp_vkResetQueryPool = nullptr; +#else + void * fp_vkResetQueryPool{}; +#endif + PFN_vkCreateBuffer fp_vkCreateBuffer = nullptr; + PFN_vkDestroyBuffer fp_vkDestroyBuffer = nullptr; + 
PFN_vkCreateBufferView fp_vkCreateBufferView = nullptr; + PFN_vkDestroyBufferView fp_vkDestroyBufferView = nullptr; + PFN_vkCreateImage fp_vkCreateImage = nullptr; + PFN_vkDestroyImage fp_vkDestroyImage = nullptr; + PFN_vkGetImageSubresourceLayout fp_vkGetImageSubresourceLayout = nullptr; + PFN_vkCreateImageView fp_vkCreateImageView = nullptr; + PFN_vkDestroyImageView fp_vkDestroyImageView = nullptr; + PFN_vkCreateShaderModule fp_vkCreateShaderModule = nullptr; + PFN_vkDestroyShaderModule fp_vkDestroyShaderModule = nullptr; + PFN_vkCreatePipelineCache fp_vkCreatePipelineCache = nullptr; + PFN_vkDestroyPipelineCache fp_vkDestroyPipelineCache = nullptr; + PFN_vkGetPipelineCacheData fp_vkGetPipelineCacheData = nullptr; + PFN_vkMergePipelineCaches fp_vkMergePipelineCaches = nullptr; +#if (defined(VK_KHR_pipeline_binary)) + PFN_vkCreatePipelineBinariesKHR fp_vkCreatePipelineBinariesKHR = nullptr; +#else + void * fp_vkCreatePipelineBinariesKHR{}; +#endif +#if (defined(VK_KHR_pipeline_binary)) + PFN_vkDestroyPipelineBinaryKHR fp_vkDestroyPipelineBinaryKHR = nullptr; +#else + void * fp_vkDestroyPipelineBinaryKHR{}; +#endif +#if (defined(VK_KHR_pipeline_binary)) + PFN_vkGetPipelineKeyKHR fp_vkGetPipelineKeyKHR = nullptr; +#else + void * fp_vkGetPipelineKeyKHR{}; +#endif +#if (defined(VK_KHR_pipeline_binary)) + PFN_vkGetPipelineBinaryDataKHR fp_vkGetPipelineBinaryDataKHR = nullptr; +#else + void * fp_vkGetPipelineBinaryDataKHR{}; +#endif +#if (defined(VK_KHR_pipeline_binary)) + PFN_vkReleaseCapturedPipelineDataKHR fp_vkReleaseCapturedPipelineDataKHR = nullptr; +#else + void * fp_vkReleaseCapturedPipelineDataKHR{}; +#endif + PFN_vkCreateGraphicsPipelines fp_vkCreateGraphicsPipelines = nullptr; + PFN_vkCreateComputePipelines fp_vkCreateComputePipelines = nullptr; +#if (defined(VK_HUAWEI_subpass_shading)) + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI fp_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = nullptr; +#else + void * fp_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI{}; +#endif + PFN_vkDestroyPipeline fp_vkDestroyPipeline = nullptr; + PFN_vkCreatePipelineLayout fp_vkCreatePipelineLayout = nullptr; + PFN_vkDestroyPipelineLayout fp_vkDestroyPipelineLayout = nullptr; + PFN_vkCreateSampler fp_vkCreateSampler = nullptr; + PFN_vkDestroySampler fp_vkDestroySampler = nullptr; + PFN_vkCreateDescriptorSetLayout fp_vkCreateDescriptorSetLayout = nullptr; + PFN_vkDestroyDescriptorSetLayout fp_vkDestroyDescriptorSetLayout = nullptr; + PFN_vkCreateDescriptorPool fp_vkCreateDescriptorPool = nullptr; + PFN_vkDestroyDescriptorPool fp_vkDestroyDescriptorPool = nullptr; + PFN_vkResetDescriptorPool fp_vkResetDescriptorPool = nullptr; + PFN_vkAllocateDescriptorSets fp_vkAllocateDescriptorSets = nullptr; + PFN_vkFreeDescriptorSets fp_vkFreeDescriptorSets = nullptr; + PFN_vkUpdateDescriptorSets fp_vkUpdateDescriptorSets = nullptr; + PFN_vkCreateFramebuffer fp_vkCreateFramebuffer = nullptr; + PFN_vkDestroyFramebuffer fp_vkDestroyFramebuffer = nullptr; + PFN_vkCreateRenderPass fp_vkCreateRenderPass = nullptr; + PFN_vkDestroyRenderPass fp_vkDestroyRenderPass = nullptr; + PFN_vkGetRenderAreaGranularity fp_vkGetRenderAreaGranularity = nullptr; +#if (defined(VK_KHR_maintenance5)) + PFN_vkGetRenderingAreaGranularityKHR fp_vkGetRenderingAreaGranularityKHR = nullptr; +#else + void * fp_vkGetRenderingAreaGranularityKHR{}; +#endif + PFN_vkCreateCommandPool fp_vkCreateCommandPool = nullptr; + PFN_vkDestroyCommandPool fp_vkDestroyCommandPool = nullptr; + PFN_vkResetCommandPool fp_vkResetCommandPool = nullptr; + 
PFN_vkAllocateCommandBuffers fp_vkAllocateCommandBuffers = nullptr; + PFN_vkFreeCommandBuffers fp_vkFreeCommandBuffers = nullptr; + PFN_vkBeginCommandBuffer fp_vkBeginCommandBuffer = nullptr; + PFN_vkEndCommandBuffer fp_vkEndCommandBuffer = nullptr; + PFN_vkResetCommandBuffer fp_vkResetCommandBuffer = nullptr; + PFN_vkCmdBindPipeline fp_vkCmdBindPipeline = nullptr; +#if (defined(VK_EXT_attachment_feedback_loop_dynamic_state)) + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT fp_vkCmdSetAttachmentFeedbackLoopEnableEXT = nullptr; +#else + void * fp_vkCmdSetAttachmentFeedbackLoopEnableEXT{}; +#endif + PFN_vkCmdSetViewport fp_vkCmdSetViewport = nullptr; + PFN_vkCmdSetScissor fp_vkCmdSetScissor = nullptr; + PFN_vkCmdSetLineWidth fp_vkCmdSetLineWidth = nullptr; + PFN_vkCmdSetDepthBias fp_vkCmdSetDepthBias = nullptr; + PFN_vkCmdSetBlendConstants fp_vkCmdSetBlendConstants = nullptr; + PFN_vkCmdSetDepthBounds fp_vkCmdSetDepthBounds = nullptr; + PFN_vkCmdSetStencilCompareMask fp_vkCmdSetStencilCompareMask = nullptr; + PFN_vkCmdSetStencilWriteMask fp_vkCmdSetStencilWriteMask = nullptr; + PFN_vkCmdSetStencilReference fp_vkCmdSetStencilReference = nullptr; + PFN_vkCmdBindDescriptorSets fp_vkCmdBindDescriptorSets = nullptr; + PFN_vkCmdBindIndexBuffer fp_vkCmdBindIndexBuffer = nullptr; + PFN_vkCmdBindVertexBuffers fp_vkCmdBindVertexBuffers = nullptr; + PFN_vkCmdDraw fp_vkCmdDraw = nullptr; + PFN_vkCmdDrawIndexed fp_vkCmdDrawIndexed = nullptr; +#if (defined(VK_EXT_multi_draw)) + PFN_vkCmdDrawMultiEXT fp_vkCmdDrawMultiEXT = nullptr; +#else + void * fp_vkCmdDrawMultiEXT{}; +#endif +#if (defined(VK_EXT_multi_draw)) + PFN_vkCmdDrawMultiIndexedEXT fp_vkCmdDrawMultiIndexedEXT = nullptr; +#else + void * fp_vkCmdDrawMultiIndexedEXT{}; +#endif + PFN_vkCmdDrawIndirect fp_vkCmdDrawIndirect = nullptr; + PFN_vkCmdDrawIndexedIndirect fp_vkCmdDrawIndexedIndirect = nullptr; + PFN_vkCmdDispatch fp_vkCmdDispatch = nullptr; + PFN_vkCmdDispatchIndirect fp_vkCmdDispatchIndirect = nullptr; +#if (defined(VK_HUAWEI_subpass_shading)) + PFN_vkCmdSubpassShadingHUAWEI fp_vkCmdSubpassShadingHUAWEI = nullptr; +#else + void * fp_vkCmdSubpassShadingHUAWEI{}; +#endif +#if (defined(VK_HUAWEI_cluster_culling_shader)) + PFN_vkCmdDrawClusterHUAWEI fp_vkCmdDrawClusterHUAWEI = nullptr; +#else + void * fp_vkCmdDrawClusterHUAWEI{}; +#endif +#if (defined(VK_HUAWEI_cluster_culling_shader)) + PFN_vkCmdDrawClusterIndirectHUAWEI fp_vkCmdDrawClusterIndirectHUAWEI = nullptr; +#else + void * fp_vkCmdDrawClusterIndirectHUAWEI{}; +#endif +#if (defined(VK_NV_device_generated_commands_compute)) + PFN_vkCmdUpdatePipelineIndirectBufferNV fp_vkCmdUpdatePipelineIndirectBufferNV = nullptr; +#else + void * fp_vkCmdUpdatePipelineIndirectBufferNV{}; +#endif + PFN_vkCmdCopyBuffer fp_vkCmdCopyBuffer = nullptr; + PFN_vkCmdCopyImage fp_vkCmdCopyImage = nullptr; + PFN_vkCmdBlitImage fp_vkCmdBlitImage = nullptr; + PFN_vkCmdCopyBufferToImage fp_vkCmdCopyBufferToImage = nullptr; + PFN_vkCmdCopyImageToBuffer fp_vkCmdCopyImageToBuffer = nullptr; +#if (defined(VK_NV_copy_memory_indirect)) + PFN_vkCmdCopyMemoryIndirectNV fp_vkCmdCopyMemoryIndirectNV = nullptr; +#else + void * fp_vkCmdCopyMemoryIndirectNV{}; +#endif +#if (defined(VK_NV_copy_memory_indirect)) + PFN_vkCmdCopyMemoryToImageIndirectNV fp_vkCmdCopyMemoryToImageIndirectNV = nullptr; +#else + void * fp_vkCmdCopyMemoryToImageIndirectNV{}; +#endif + PFN_vkCmdUpdateBuffer fp_vkCmdUpdateBuffer = nullptr; + PFN_vkCmdFillBuffer fp_vkCmdFillBuffer = nullptr; + PFN_vkCmdClearColorImage fp_vkCmdClearColorImage = nullptr; + 
PFN_vkCmdClearDepthStencilImage fp_vkCmdClearDepthStencilImage = nullptr; + PFN_vkCmdClearAttachments fp_vkCmdClearAttachments = nullptr; + PFN_vkCmdResolveImage fp_vkCmdResolveImage = nullptr; + PFN_vkCmdSetEvent fp_vkCmdSetEvent = nullptr; + PFN_vkCmdResetEvent fp_vkCmdResetEvent = nullptr; + PFN_vkCmdWaitEvents fp_vkCmdWaitEvents = nullptr; + PFN_vkCmdPipelineBarrier fp_vkCmdPipelineBarrier = nullptr; + PFN_vkCmdBeginQuery fp_vkCmdBeginQuery = nullptr; + PFN_vkCmdEndQuery fp_vkCmdEndQuery = nullptr; +#if (defined(VK_EXT_conditional_rendering)) + PFN_vkCmdBeginConditionalRenderingEXT fp_vkCmdBeginConditionalRenderingEXT = nullptr; +#else + void * fp_vkCmdBeginConditionalRenderingEXT{}; +#endif +#if (defined(VK_EXT_conditional_rendering)) + PFN_vkCmdEndConditionalRenderingEXT fp_vkCmdEndConditionalRenderingEXT = nullptr; +#else + void * fp_vkCmdEndConditionalRenderingEXT{}; +#endif + PFN_vkCmdResetQueryPool fp_vkCmdResetQueryPool = nullptr; + PFN_vkCmdWriteTimestamp fp_vkCmdWriteTimestamp = nullptr; + PFN_vkCmdCopyQueryPoolResults fp_vkCmdCopyQueryPoolResults = nullptr; + PFN_vkCmdPushConstants fp_vkCmdPushConstants = nullptr; + PFN_vkCmdBeginRenderPass fp_vkCmdBeginRenderPass = nullptr; + PFN_vkCmdNextSubpass fp_vkCmdNextSubpass = nullptr; + PFN_vkCmdEndRenderPass fp_vkCmdEndRenderPass = nullptr; + PFN_vkCmdExecuteCommands fp_vkCmdExecuteCommands = nullptr; +#if (defined(VK_KHR_display_swapchain)) + PFN_vkCreateSharedSwapchainsKHR fp_vkCreateSharedSwapchainsKHR = nullptr; +#else + void * fp_vkCreateSharedSwapchainsKHR{}; +#endif +#if (defined(VK_KHR_swapchain)) + PFN_vkCreateSwapchainKHR fp_vkCreateSwapchainKHR = nullptr; +#else + void * fp_vkCreateSwapchainKHR{}; +#endif +#if (defined(VK_KHR_swapchain)) + PFN_vkDestroySwapchainKHR fp_vkDestroySwapchainKHR = nullptr; +#else + void * fp_vkDestroySwapchainKHR{}; +#endif +#if (defined(VK_KHR_swapchain)) + PFN_vkGetSwapchainImagesKHR fp_vkGetSwapchainImagesKHR = nullptr; +#else + void * fp_vkGetSwapchainImagesKHR{}; +#endif +#if (defined(VK_KHR_swapchain)) + PFN_vkAcquireNextImageKHR fp_vkAcquireNextImageKHR = nullptr; +#else + void * fp_vkAcquireNextImageKHR{}; +#endif +#if (defined(VK_KHR_swapchain)) + PFN_vkQueuePresentKHR fp_vkQueuePresentKHR = nullptr; +#else + void * fp_vkQueuePresentKHR{}; +#endif +#if (defined(VK_EXT_debug_marker)) + PFN_vkDebugMarkerSetObjectNameEXT fp_vkDebugMarkerSetObjectNameEXT = nullptr; +#else + void * fp_vkDebugMarkerSetObjectNameEXT{}; +#endif +#if (defined(VK_EXT_debug_marker)) + PFN_vkDebugMarkerSetObjectTagEXT fp_vkDebugMarkerSetObjectTagEXT = nullptr; +#else + void * fp_vkDebugMarkerSetObjectTagEXT{}; +#endif +#if (defined(VK_EXT_debug_marker)) + PFN_vkCmdDebugMarkerBeginEXT fp_vkCmdDebugMarkerBeginEXT = nullptr; +#else + void * fp_vkCmdDebugMarkerBeginEXT{}; +#endif +#if (defined(VK_EXT_debug_marker)) + PFN_vkCmdDebugMarkerEndEXT fp_vkCmdDebugMarkerEndEXT = nullptr; +#else + void * fp_vkCmdDebugMarkerEndEXT{}; +#endif +#if (defined(VK_EXT_debug_marker)) + PFN_vkCmdDebugMarkerInsertEXT fp_vkCmdDebugMarkerInsertEXT = nullptr; +#else + void * fp_vkCmdDebugMarkerInsertEXT{}; +#endif +#if (defined(VK_NV_external_memory_win32)) + PFN_vkGetMemoryWin32HandleNV fp_vkGetMemoryWin32HandleNV = nullptr; +#else + void * fp_vkGetMemoryWin32HandleNV{}; +#endif +#if (defined(VK_NV_device_generated_commands)) + PFN_vkCmdExecuteGeneratedCommandsNV fp_vkCmdExecuteGeneratedCommandsNV = nullptr; +#else + void * fp_vkCmdExecuteGeneratedCommandsNV{}; +#endif +#if (defined(VK_NV_device_generated_commands)) + 
PFN_vkCmdPreprocessGeneratedCommandsNV fp_vkCmdPreprocessGeneratedCommandsNV = nullptr; +#else + void * fp_vkCmdPreprocessGeneratedCommandsNV{}; +#endif +#if (defined(VK_NV_device_generated_commands)) + PFN_vkCmdBindPipelineShaderGroupNV fp_vkCmdBindPipelineShaderGroupNV = nullptr; +#else + void * fp_vkCmdBindPipelineShaderGroupNV{}; +#endif +#if (defined(VK_NV_device_generated_commands)) + PFN_vkGetGeneratedCommandsMemoryRequirementsNV fp_vkGetGeneratedCommandsMemoryRequirementsNV = nullptr; +#else + void * fp_vkGetGeneratedCommandsMemoryRequirementsNV{}; +#endif +#if (defined(VK_NV_device_generated_commands)) + PFN_vkCreateIndirectCommandsLayoutNV fp_vkCreateIndirectCommandsLayoutNV = nullptr; +#else + void * fp_vkCreateIndirectCommandsLayoutNV{}; +#endif +#if (defined(VK_NV_device_generated_commands)) + PFN_vkDestroyIndirectCommandsLayoutNV fp_vkDestroyIndirectCommandsLayoutNV = nullptr; +#else + void * fp_vkDestroyIndirectCommandsLayoutNV{}; +#endif +#if (defined(VK_EXT_device_generated_commands)) + PFN_vkCmdExecuteGeneratedCommandsEXT fp_vkCmdExecuteGeneratedCommandsEXT = nullptr; +#else + void * fp_vkCmdExecuteGeneratedCommandsEXT{}; +#endif +#if (defined(VK_EXT_device_generated_commands)) + PFN_vkCmdPreprocessGeneratedCommandsEXT fp_vkCmdPreprocessGeneratedCommandsEXT = nullptr; +#else + void * fp_vkCmdPreprocessGeneratedCommandsEXT{}; +#endif +#if (defined(VK_EXT_device_generated_commands)) + PFN_vkGetGeneratedCommandsMemoryRequirementsEXT fp_vkGetGeneratedCommandsMemoryRequirementsEXT = nullptr; +#else + void * fp_vkGetGeneratedCommandsMemoryRequirementsEXT{}; +#endif +#if (defined(VK_EXT_device_generated_commands)) + PFN_vkCreateIndirectCommandsLayoutEXT fp_vkCreateIndirectCommandsLayoutEXT = nullptr; +#else + void * fp_vkCreateIndirectCommandsLayoutEXT{}; +#endif +#if (defined(VK_EXT_device_generated_commands)) + PFN_vkDestroyIndirectCommandsLayoutEXT fp_vkDestroyIndirectCommandsLayoutEXT = nullptr; +#else + void * fp_vkDestroyIndirectCommandsLayoutEXT{}; +#endif +#if (defined(VK_EXT_device_generated_commands)) + PFN_vkCreateIndirectExecutionSetEXT fp_vkCreateIndirectExecutionSetEXT = nullptr; +#else + void * fp_vkCreateIndirectExecutionSetEXT{}; +#endif +#if (defined(VK_EXT_device_generated_commands)) + PFN_vkDestroyIndirectExecutionSetEXT fp_vkDestroyIndirectExecutionSetEXT = nullptr; +#else + void * fp_vkDestroyIndirectExecutionSetEXT{}; +#endif +#if (defined(VK_EXT_device_generated_commands)) + PFN_vkUpdateIndirectExecutionSetPipelineEXT fp_vkUpdateIndirectExecutionSetPipelineEXT = nullptr; +#else + void * fp_vkUpdateIndirectExecutionSetPipelineEXT{}; +#endif +#if (defined(VK_EXT_device_generated_commands)) + PFN_vkUpdateIndirectExecutionSetShaderEXT fp_vkUpdateIndirectExecutionSetShaderEXT = nullptr; +#else + void * fp_vkUpdateIndirectExecutionSetShaderEXT{}; +#endif +#if (defined(VK_KHR_push_descriptor)) + PFN_vkCmdPushDescriptorSetKHR fp_vkCmdPushDescriptorSetKHR = nullptr; +#else + void * fp_vkCmdPushDescriptorSetKHR{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkTrimCommandPool fp_vkTrimCommandPool = nullptr; +#else + void * fp_vkTrimCommandPool{}; +#endif +#if (defined(VK_KHR_external_memory_win32)) + PFN_vkGetMemoryWin32HandleKHR fp_vkGetMemoryWin32HandleKHR = nullptr; +#else + void * fp_vkGetMemoryWin32HandleKHR{}; +#endif +#if (defined(VK_KHR_external_memory_win32)) + PFN_vkGetMemoryWin32HandlePropertiesKHR fp_vkGetMemoryWin32HandlePropertiesKHR = nullptr; +#else + void * fp_vkGetMemoryWin32HandlePropertiesKHR{}; +#endif +#if (defined(VK_KHR_external_memory_fd)) 
+ PFN_vkGetMemoryFdKHR fp_vkGetMemoryFdKHR = nullptr; +#else + void * fp_vkGetMemoryFdKHR{}; +#endif +#if (defined(VK_KHR_external_memory_fd)) + PFN_vkGetMemoryFdPropertiesKHR fp_vkGetMemoryFdPropertiesKHR = nullptr; +#else + void * fp_vkGetMemoryFdPropertiesKHR{}; +#endif +#if (defined(VK_FUCHSIA_external_memory)) + PFN_vkGetMemoryZirconHandleFUCHSIA fp_vkGetMemoryZirconHandleFUCHSIA = nullptr; +#else + void * fp_vkGetMemoryZirconHandleFUCHSIA{}; +#endif +#if (defined(VK_FUCHSIA_external_memory)) + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA fp_vkGetMemoryZirconHandlePropertiesFUCHSIA = nullptr; +#else + void * fp_vkGetMemoryZirconHandlePropertiesFUCHSIA{}; +#endif +#if (defined(VK_NV_external_memory_rdma)) + PFN_vkGetMemoryRemoteAddressNV fp_vkGetMemoryRemoteAddressNV = nullptr; +#else + void * fp_vkGetMemoryRemoteAddressNV{}; +#endif +#if (defined(VK_NV_external_memory_sci_buf)) + PFN_vkGetMemorySciBufNV fp_vkGetMemorySciBufNV = nullptr; +#else + void * fp_vkGetMemorySciBufNV{}; +#endif +#if (defined(VK_KHR_external_semaphore_win32)) + PFN_vkGetSemaphoreWin32HandleKHR fp_vkGetSemaphoreWin32HandleKHR = nullptr; +#else + void * fp_vkGetSemaphoreWin32HandleKHR{}; +#endif +#if (defined(VK_KHR_external_semaphore_win32)) + PFN_vkImportSemaphoreWin32HandleKHR fp_vkImportSemaphoreWin32HandleKHR = nullptr; +#else + void * fp_vkImportSemaphoreWin32HandleKHR{}; +#endif +#if (defined(VK_KHR_external_semaphore_fd)) + PFN_vkGetSemaphoreFdKHR fp_vkGetSemaphoreFdKHR = nullptr; +#else + void * fp_vkGetSemaphoreFdKHR{}; +#endif +#if (defined(VK_KHR_external_semaphore_fd)) + PFN_vkImportSemaphoreFdKHR fp_vkImportSemaphoreFdKHR = nullptr; +#else + void * fp_vkImportSemaphoreFdKHR{}; +#endif +#if (defined(VK_FUCHSIA_external_semaphore)) + PFN_vkGetSemaphoreZirconHandleFUCHSIA fp_vkGetSemaphoreZirconHandleFUCHSIA = nullptr; +#else + void * fp_vkGetSemaphoreZirconHandleFUCHSIA{}; +#endif +#if (defined(VK_FUCHSIA_external_semaphore)) + PFN_vkImportSemaphoreZirconHandleFUCHSIA fp_vkImportSemaphoreZirconHandleFUCHSIA = nullptr; +#else + void * fp_vkImportSemaphoreZirconHandleFUCHSIA{}; +#endif +#if (defined(VK_KHR_external_fence_win32)) + PFN_vkGetFenceWin32HandleKHR fp_vkGetFenceWin32HandleKHR = nullptr; +#else + void * fp_vkGetFenceWin32HandleKHR{}; +#endif +#if (defined(VK_KHR_external_fence_win32)) + PFN_vkImportFenceWin32HandleKHR fp_vkImportFenceWin32HandleKHR = nullptr; +#else + void * fp_vkImportFenceWin32HandleKHR{}; +#endif +#if (defined(VK_KHR_external_fence_fd)) + PFN_vkGetFenceFdKHR fp_vkGetFenceFdKHR = nullptr; +#else + void * fp_vkGetFenceFdKHR{}; +#endif +#if (defined(VK_KHR_external_fence_fd)) + PFN_vkImportFenceFdKHR fp_vkImportFenceFdKHR = nullptr; +#else + void * fp_vkImportFenceFdKHR{}; +#endif +#if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) + PFN_vkGetFenceSciSyncFenceNV fp_vkGetFenceSciSyncFenceNV = nullptr; +#else + void * fp_vkGetFenceSciSyncFenceNV{}; +#endif +#if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) + PFN_vkGetFenceSciSyncObjNV fp_vkGetFenceSciSyncObjNV = nullptr; +#else + void * fp_vkGetFenceSciSyncObjNV{}; +#endif +#if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) + PFN_vkImportFenceSciSyncFenceNV fp_vkImportFenceSciSyncFenceNV = nullptr; +#else + void * fp_vkImportFenceSciSyncFenceNV{}; +#endif +#if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) + PFN_vkImportFenceSciSyncObjNV fp_vkImportFenceSciSyncObjNV = nullptr; +#else + void * 
fp_vkImportFenceSciSyncObjNV{}; +#endif +#if (defined(VK_NV_external_sci_sync)) + PFN_vkGetSemaphoreSciSyncObjNV fp_vkGetSemaphoreSciSyncObjNV = nullptr; +#else + void * fp_vkGetSemaphoreSciSyncObjNV{}; +#endif +#if (defined(VK_NV_external_sci_sync)) + PFN_vkImportSemaphoreSciSyncObjNV fp_vkImportSemaphoreSciSyncObjNV = nullptr; +#else + void * fp_vkImportSemaphoreSciSyncObjNV{}; +#endif +#if (defined(VK_NV_external_sci_sync2)) + PFN_vkCreateSemaphoreSciSyncPoolNV fp_vkCreateSemaphoreSciSyncPoolNV = nullptr; +#else + void * fp_vkCreateSemaphoreSciSyncPoolNV{}; +#endif +#if (defined(VK_NV_external_sci_sync2)) + PFN_vkDestroySemaphoreSciSyncPoolNV fp_vkDestroySemaphoreSciSyncPoolNV = nullptr; +#else + void * fp_vkDestroySemaphoreSciSyncPoolNV{}; +#endif +#if (defined(VK_EXT_display_control)) + PFN_vkDisplayPowerControlEXT fp_vkDisplayPowerControlEXT = nullptr; +#else + void * fp_vkDisplayPowerControlEXT{}; +#endif +#if (defined(VK_EXT_display_control)) + PFN_vkRegisterDeviceEventEXT fp_vkRegisterDeviceEventEXT = nullptr; +#else + void * fp_vkRegisterDeviceEventEXT{}; +#endif +#if (defined(VK_EXT_display_control)) + PFN_vkRegisterDisplayEventEXT fp_vkRegisterDisplayEventEXT = nullptr; +#else + void * fp_vkRegisterDisplayEventEXT{}; +#endif +#if (defined(VK_EXT_display_control)) + PFN_vkGetSwapchainCounterEXT fp_vkGetSwapchainCounterEXT = nullptr; +#else + void * fp_vkGetSwapchainCounterEXT{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkGetDeviceGroupPeerMemoryFeatures fp_vkGetDeviceGroupPeerMemoryFeatures = nullptr; +#else + void * fp_vkGetDeviceGroupPeerMemoryFeatures{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkBindBufferMemory2 fp_vkBindBufferMemory2 = nullptr; +#else + void * fp_vkBindBufferMemory2{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkBindImageMemory2 fp_vkBindImageMemory2 = nullptr; +#else + void * fp_vkBindImageMemory2{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkCmdSetDeviceMask fp_vkCmdSetDeviceMask = nullptr; +#else + void * fp_vkCmdSetDeviceMask{}; +#endif +#if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group)) + PFN_vkGetDeviceGroupPresentCapabilitiesKHR fp_vkGetDeviceGroupPresentCapabilitiesKHR = nullptr; +#else + void * fp_vkGetDeviceGroupPresentCapabilitiesKHR{}; +#endif +#if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group)) + PFN_vkGetDeviceGroupSurfacePresentModesKHR fp_vkGetDeviceGroupSurfacePresentModesKHR = nullptr; +#else + void * fp_vkGetDeviceGroupSurfacePresentModesKHR{}; +#endif +#if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group)) + PFN_vkAcquireNextImage2KHR fp_vkAcquireNextImage2KHR = nullptr; +#else + void * fp_vkAcquireNextImage2KHR{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkCmdDispatchBase fp_vkCmdDispatchBase = nullptr; +#else + void * fp_vkCmdDispatchBase{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkCreateDescriptorUpdateTemplate fp_vkCreateDescriptorUpdateTemplate = nullptr; +#else + void * fp_vkCreateDescriptorUpdateTemplate{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkDestroyDescriptorUpdateTemplate fp_vkDestroyDescriptorUpdateTemplate = nullptr; +#else + void * fp_vkDestroyDescriptorUpdateTemplate{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkUpdateDescriptorSetWithTemplate fp_vkUpdateDescriptorSetWithTemplate = nullptr; +#else + void * fp_vkUpdateDescriptorSetWithTemplate{}; +#endif +#if (defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_descriptor_update_template)) + PFN_vkCmdPushDescriptorSetWithTemplateKHR fp_vkCmdPushDescriptorSetWithTemplateKHR = nullptr; +#else + 
void * fp_vkCmdPushDescriptorSetWithTemplateKHR{}; +#endif +#if (defined(VK_EXT_hdr_metadata)) + PFN_vkSetHdrMetadataEXT fp_vkSetHdrMetadataEXT = nullptr; +#else + void * fp_vkSetHdrMetadataEXT{}; +#endif +#if (defined(VK_KHR_shared_presentable_image)) + PFN_vkGetSwapchainStatusKHR fp_vkGetSwapchainStatusKHR = nullptr; +#else + void * fp_vkGetSwapchainStatusKHR{}; +#endif +#if (defined(VK_GOOGLE_display_timing)) + PFN_vkGetRefreshCycleDurationGOOGLE fp_vkGetRefreshCycleDurationGOOGLE = nullptr; +#else + void * fp_vkGetRefreshCycleDurationGOOGLE{}; +#endif +#if (defined(VK_GOOGLE_display_timing)) + PFN_vkGetPastPresentationTimingGOOGLE fp_vkGetPastPresentationTimingGOOGLE = nullptr; +#else + void * fp_vkGetPastPresentationTimingGOOGLE{}; +#endif +#if (defined(VK_NV_clip_space_w_scaling)) + PFN_vkCmdSetViewportWScalingNV fp_vkCmdSetViewportWScalingNV = nullptr; +#else + void * fp_vkCmdSetViewportWScalingNV{}; +#endif +#if (defined(VK_EXT_discard_rectangles)) + PFN_vkCmdSetDiscardRectangleEXT fp_vkCmdSetDiscardRectangleEXT = nullptr; +#else + void * fp_vkCmdSetDiscardRectangleEXT{}; +#endif +#if ((defined(VK_EXT_discard_rectangles))) && VK_HEADER_VERSION >= 241 + PFN_vkCmdSetDiscardRectangleEnableEXT fp_vkCmdSetDiscardRectangleEnableEXT = nullptr; +#else + void * fp_vkCmdSetDiscardRectangleEnableEXT{}; +#endif +#if ((defined(VK_EXT_discard_rectangles))) && VK_HEADER_VERSION >= 241 + PFN_vkCmdSetDiscardRectangleModeEXT fp_vkCmdSetDiscardRectangleModeEXT = nullptr; +#else + void * fp_vkCmdSetDiscardRectangleModeEXT{}; +#endif +#if (defined(VK_EXT_sample_locations)) + PFN_vkCmdSetSampleLocationsEXT fp_vkCmdSetSampleLocationsEXT = nullptr; +#else + void * fp_vkCmdSetSampleLocationsEXT{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkGetBufferMemoryRequirements2 fp_vkGetBufferMemoryRequirements2 = nullptr; +#else + void * fp_vkGetBufferMemoryRequirements2{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkGetImageMemoryRequirements2 fp_vkGetImageMemoryRequirements2 = nullptr; +#else + void * fp_vkGetImageMemoryRequirements2{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkGetImageSparseMemoryRequirements2 fp_vkGetImageSparseMemoryRequirements2 = nullptr; +#else + void * fp_vkGetImageSparseMemoryRequirements2{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkGetDeviceBufferMemoryRequirements fp_vkGetDeviceBufferMemoryRequirements = nullptr; +#else + void * fp_vkGetDeviceBufferMemoryRequirements{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkGetDeviceImageMemoryRequirements fp_vkGetDeviceImageMemoryRequirements = nullptr; +#else + void * fp_vkGetDeviceImageMemoryRequirements{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkGetDeviceImageSparseMemoryRequirements fp_vkGetDeviceImageSparseMemoryRequirements = nullptr; +#else + void * fp_vkGetDeviceImageSparseMemoryRequirements{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkCreateSamplerYcbcrConversion fp_vkCreateSamplerYcbcrConversion = nullptr; +#else + void * fp_vkCreateSamplerYcbcrConversion{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkDestroySamplerYcbcrConversion fp_vkDestroySamplerYcbcrConversion = nullptr; +#else + void * fp_vkDestroySamplerYcbcrConversion{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkGetDeviceQueue2 fp_vkGetDeviceQueue2 = nullptr; +#else + void * fp_vkGetDeviceQueue2{}; +#endif +#if (defined(VK_EXT_validation_cache)) + PFN_vkCreateValidationCacheEXT fp_vkCreateValidationCacheEXT = nullptr; +#else + void * fp_vkCreateValidationCacheEXT{}; +#endif +#if (defined(VK_EXT_validation_cache)) + 
PFN_vkDestroyValidationCacheEXT fp_vkDestroyValidationCacheEXT = nullptr; +#else + void * fp_vkDestroyValidationCacheEXT{}; +#endif +#if (defined(VK_EXT_validation_cache)) + PFN_vkGetValidationCacheDataEXT fp_vkGetValidationCacheDataEXT = nullptr; +#else + void * fp_vkGetValidationCacheDataEXT{}; +#endif +#if (defined(VK_EXT_validation_cache)) + PFN_vkMergeValidationCachesEXT fp_vkMergeValidationCachesEXT = nullptr; +#else + void * fp_vkMergeValidationCachesEXT{}; +#endif +#if (defined(VK_VERSION_1_1)) + PFN_vkGetDescriptorSetLayoutSupport fp_vkGetDescriptorSetLayoutSupport = nullptr; +#else + void * fp_vkGetDescriptorSetLayoutSupport{}; +#endif +#if (defined(VK_ANDROID_native_buffer)) + PFN_vkGetSwapchainGrallocUsageANDROID fp_vkGetSwapchainGrallocUsageANDROID = nullptr; +#else + void * fp_vkGetSwapchainGrallocUsageANDROID{}; +#endif +#if (defined(VK_ANDROID_native_buffer)) + PFN_vkGetSwapchainGrallocUsage2ANDROID fp_vkGetSwapchainGrallocUsage2ANDROID = nullptr; +#else + void * fp_vkGetSwapchainGrallocUsage2ANDROID{}; +#endif +#if (defined(VK_ANDROID_native_buffer)) + PFN_vkAcquireImageANDROID fp_vkAcquireImageANDROID = nullptr; +#else + void * fp_vkAcquireImageANDROID{}; +#endif +#if (defined(VK_ANDROID_native_buffer)) + PFN_vkQueueSignalReleaseImageANDROID fp_vkQueueSignalReleaseImageANDROID = nullptr; +#else + void * fp_vkQueueSignalReleaseImageANDROID{}; +#endif +#if (defined(VK_AMD_shader_info)) + PFN_vkGetShaderInfoAMD fp_vkGetShaderInfoAMD = nullptr; +#else + void * fp_vkGetShaderInfoAMD{}; +#endif +#if (defined(VK_AMD_display_native_hdr)) + PFN_vkSetLocalDimmingAMD fp_vkSetLocalDimmingAMD = nullptr; +#else + void * fp_vkSetLocalDimmingAMD{}; +#endif +#if (defined(VK_KHR_calibrated_timestamps)) + PFN_vkGetCalibratedTimestampsKHR fp_vkGetCalibratedTimestampsKHR = nullptr; +#else + void * fp_vkGetCalibratedTimestampsKHR{}; +#endif +#if (defined(VK_EXT_debug_utils)) + PFN_vkSetDebugUtilsObjectNameEXT fp_vkSetDebugUtilsObjectNameEXT = nullptr; +#else + void * fp_vkSetDebugUtilsObjectNameEXT{}; +#endif +#if (defined(VK_EXT_debug_utils)) + PFN_vkSetDebugUtilsObjectTagEXT fp_vkSetDebugUtilsObjectTagEXT = nullptr; +#else + void * fp_vkSetDebugUtilsObjectTagEXT{}; +#endif +#if (defined(VK_EXT_debug_utils)) + PFN_vkQueueBeginDebugUtilsLabelEXT fp_vkQueueBeginDebugUtilsLabelEXT = nullptr; +#else + void * fp_vkQueueBeginDebugUtilsLabelEXT{}; +#endif +#if (defined(VK_EXT_debug_utils)) + PFN_vkQueueEndDebugUtilsLabelEXT fp_vkQueueEndDebugUtilsLabelEXT = nullptr; +#else + void * fp_vkQueueEndDebugUtilsLabelEXT{}; +#endif +#if (defined(VK_EXT_debug_utils)) + PFN_vkQueueInsertDebugUtilsLabelEXT fp_vkQueueInsertDebugUtilsLabelEXT = nullptr; +#else + void * fp_vkQueueInsertDebugUtilsLabelEXT{}; +#endif +#if (defined(VK_EXT_debug_utils)) + PFN_vkCmdBeginDebugUtilsLabelEXT fp_vkCmdBeginDebugUtilsLabelEXT = nullptr; +#else + void * fp_vkCmdBeginDebugUtilsLabelEXT{}; +#endif +#if (defined(VK_EXT_debug_utils)) + PFN_vkCmdEndDebugUtilsLabelEXT fp_vkCmdEndDebugUtilsLabelEXT = nullptr; +#else + void * fp_vkCmdEndDebugUtilsLabelEXT{}; +#endif +#if (defined(VK_EXT_debug_utils)) + PFN_vkCmdInsertDebugUtilsLabelEXT fp_vkCmdInsertDebugUtilsLabelEXT = nullptr; +#else + void * fp_vkCmdInsertDebugUtilsLabelEXT{}; +#endif +#if (defined(VK_EXT_external_memory_host)) + PFN_vkGetMemoryHostPointerPropertiesEXT fp_vkGetMemoryHostPointerPropertiesEXT = nullptr; +#else + void * fp_vkGetMemoryHostPointerPropertiesEXT{}; +#endif +#if (defined(VK_AMD_buffer_marker)) + PFN_vkCmdWriteBufferMarkerAMD fp_vkCmdWriteBufferMarkerAMD 
= nullptr; +#else + void * fp_vkCmdWriteBufferMarkerAMD{}; +#endif +#if (defined(VK_VERSION_1_2)) + PFN_vkCreateRenderPass2 fp_vkCreateRenderPass2 = nullptr; +#else + void * fp_vkCreateRenderPass2{}; +#endif +#if (defined(VK_VERSION_1_2)) + PFN_vkCmdBeginRenderPass2 fp_vkCmdBeginRenderPass2 = nullptr; +#else + void * fp_vkCmdBeginRenderPass2{}; +#endif +#if (defined(VK_VERSION_1_2)) + PFN_vkCmdNextSubpass2 fp_vkCmdNextSubpass2 = nullptr; +#else + void * fp_vkCmdNextSubpass2{}; +#endif +#if (defined(VK_VERSION_1_2)) + PFN_vkCmdEndRenderPass2 fp_vkCmdEndRenderPass2 = nullptr; +#else + void * fp_vkCmdEndRenderPass2{}; +#endif +#if (defined(VK_VERSION_1_2)) + PFN_vkGetSemaphoreCounterValue fp_vkGetSemaphoreCounterValue = nullptr; +#else + void * fp_vkGetSemaphoreCounterValue{}; +#endif +#if (defined(VK_VERSION_1_2)) + PFN_vkWaitSemaphores fp_vkWaitSemaphores = nullptr; +#else + void * fp_vkWaitSemaphores{}; +#endif +#if (defined(VK_VERSION_1_2)) + PFN_vkSignalSemaphore fp_vkSignalSemaphore = nullptr; +#else + void * fp_vkSignalSemaphore{}; +#endif +#if (defined(VK_ANDROID_external_memory_android_hardware_buffer)) + PFN_vkGetAndroidHardwareBufferPropertiesANDROID fp_vkGetAndroidHardwareBufferPropertiesANDROID = nullptr; +#else + void * fp_vkGetAndroidHardwareBufferPropertiesANDROID{}; +#endif +#if (defined(VK_ANDROID_external_memory_android_hardware_buffer)) + PFN_vkGetMemoryAndroidHardwareBufferANDROID fp_vkGetMemoryAndroidHardwareBufferANDROID = nullptr; +#else + void * fp_vkGetMemoryAndroidHardwareBufferANDROID{}; +#endif +#if (defined(VK_VERSION_1_2)) + PFN_vkCmdDrawIndirectCount fp_vkCmdDrawIndirectCount = nullptr; +#else + void * fp_vkCmdDrawIndirectCount{}; +#endif +#if (defined(VK_VERSION_1_2)) + PFN_vkCmdDrawIndexedIndirectCount fp_vkCmdDrawIndexedIndirectCount = nullptr; +#else + void * fp_vkCmdDrawIndexedIndirectCount{}; +#endif +#if (defined(VK_NV_device_diagnostic_checkpoints)) + PFN_vkCmdSetCheckpointNV fp_vkCmdSetCheckpointNV = nullptr; +#else + void * fp_vkCmdSetCheckpointNV{}; +#endif +#if (defined(VK_NV_device_diagnostic_checkpoints)) + PFN_vkGetQueueCheckpointDataNV fp_vkGetQueueCheckpointDataNV = nullptr; +#else + void * fp_vkGetQueueCheckpointDataNV{}; +#endif +#if (defined(VK_EXT_transform_feedback)) + PFN_vkCmdBindTransformFeedbackBuffersEXT fp_vkCmdBindTransformFeedbackBuffersEXT = nullptr; +#else + void * fp_vkCmdBindTransformFeedbackBuffersEXT{}; +#endif +#if (defined(VK_EXT_transform_feedback)) + PFN_vkCmdBeginTransformFeedbackEXT fp_vkCmdBeginTransformFeedbackEXT = nullptr; +#else + void * fp_vkCmdBeginTransformFeedbackEXT{}; +#endif +#if (defined(VK_EXT_transform_feedback)) + PFN_vkCmdEndTransformFeedbackEXT fp_vkCmdEndTransformFeedbackEXT = nullptr; +#else + void * fp_vkCmdEndTransformFeedbackEXT{}; +#endif +#if (defined(VK_EXT_transform_feedback)) + PFN_vkCmdBeginQueryIndexedEXT fp_vkCmdBeginQueryIndexedEXT = nullptr; +#else + void * fp_vkCmdBeginQueryIndexedEXT{}; +#endif +#if (defined(VK_EXT_transform_feedback)) + PFN_vkCmdEndQueryIndexedEXT fp_vkCmdEndQueryIndexedEXT = nullptr; +#else + void * fp_vkCmdEndQueryIndexedEXT{}; +#endif +#if (defined(VK_EXT_transform_feedback)) + PFN_vkCmdDrawIndirectByteCountEXT fp_vkCmdDrawIndirectByteCountEXT = nullptr; +#else + void * fp_vkCmdDrawIndirectByteCountEXT{}; +#endif +#if (defined(VK_NV_scissor_exclusive)) + PFN_vkCmdSetExclusiveScissorNV fp_vkCmdSetExclusiveScissorNV = nullptr; +#else + void * fp_vkCmdSetExclusiveScissorNV{}; +#endif +#if ((defined(VK_NV_scissor_exclusive))) && VK_HEADER_VERSION >= 241 + 
PFN_vkCmdSetExclusiveScissorEnableNV fp_vkCmdSetExclusiveScissorEnableNV = nullptr; +#else + void * fp_vkCmdSetExclusiveScissorEnableNV{}; +#endif +#if (defined(VK_NV_shading_rate_image)) + PFN_vkCmdBindShadingRateImageNV fp_vkCmdBindShadingRateImageNV = nullptr; +#else + void * fp_vkCmdBindShadingRateImageNV{}; +#endif +#if (defined(VK_NV_shading_rate_image)) + PFN_vkCmdSetViewportShadingRatePaletteNV fp_vkCmdSetViewportShadingRatePaletteNV = nullptr; +#else + void * fp_vkCmdSetViewportShadingRatePaletteNV{}; +#endif +#if (defined(VK_NV_shading_rate_image)) + PFN_vkCmdSetCoarseSampleOrderNV fp_vkCmdSetCoarseSampleOrderNV = nullptr; +#else + void * fp_vkCmdSetCoarseSampleOrderNV{}; +#endif +#if (defined(VK_NV_mesh_shader)) + PFN_vkCmdDrawMeshTasksNV fp_vkCmdDrawMeshTasksNV = nullptr; +#else + void * fp_vkCmdDrawMeshTasksNV{}; +#endif +#if (defined(VK_NV_mesh_shader)) + PFN_vkCmdDrawMeshTasksIndirectNV fp_vkCmdDrawMeshTasksIndirectNV = nullptr; +#else + void * fp_vkCmdDrawMeshTasksIndirectNV{}; +#endif +#if (defined(VK_NV_mesh_shader)) + PFN_vkCmdDrawMeshTasksIndirectCountNV fp_vkCmdDrawMeshTasksIndirectCountNV = nullptr; +#else + void * fp_vkCmdDrawMeshTasksIndirectCountNV{}; +#endif +#if (defined(VK_EXT_mesh_shader)) + PFN_vkCmdDrawMeshTasksEXT fp_vkCmdDrawMeshTasksEXT = nullptr; +#else + void * fp_vkCmdDrawMeshTasksEXT{}; +#endif +#if (defined(VK_EXT_mesh_shader)) + PFN_vkCmdDrawMeshTasksIndirectEXT fp_vkCmdDrawMeshTasksIndirectEXT = nullptr; +#else + void * fp_vkCmdDrawMeshTasksIndirectEXT{}; +#endif +#if (defined(VK_EXT_mesh_shader)) + PFN_vkCmdDrawMeshTasksIndirectCountEXT fp_vkCmdDrawMeshTasksIndirectCountEXT = nullptr; +#else + void * fp_vkCmdDrawMeshTasksIndirectCountEXT{}; +#endif +#if (defined(VK_NV_ray_tracing)) + PFN_vkCompileDeferredNV fp_vkCompileDeferredNV = nullptr; +#else + void * fp_vkCompileDeferredNV{}; +#endif +#if (defined(VK_NV_ray_tracing)) + PFN_vkCreateAccelerationStructureNV fp_vkCreateAccelerationStructureNV = nullptr; +#else + void * fp_vkCreateAccelerationStructureNV{}; +#endif +#if (defined(VK_HUAWEI_invocation_mask)) + PFN_vkCmdBindInvocationMaskHUAWEI fp_vkCmdBindInvocationMaskHUAWEI = nullptr; +#else + void * fp_vkCmdBindInvocationMaskHUAWEI{}; +#endif +#if (defined(VK_KHR_acceleration_structure)) + PFN_vkDestroyAccelerationStructureKHR fp_vkDestroyAccelerationStructureKHR = nullptr; +#else + void * fp_vkDestroyAccelerationStructureKHR{}; +#endif +#if (defined(VK_NV_ray_tracing)) + PFN_vkDestroyAccelerationStructureNV fp_vkDestroyAccelerationStructureNV = nullptr; +#else + void * fp_vkDestroyAccelerationStructureNV{}; +#endif +#if (defined(VK_NV_ray_tracing)) + PFN_vkGetAccelerationStructureMemoryRequirementsNV fp_vkGetAccelerationStructureMemoryRequirementsNV = nullptr; +#else + void * fp_vkGetAccelerationStructureMemoryRequirementsNV{}; +#endif +#if (defined(VK_NV_ray_tracing)) + PFN_vkBindAccelerationStructureMemoryNV fp_vkBindAccelerationStructureMemoryNV = nullptr; +#else + void * fp_vkBindAccelerationStructureMemoryNV{}; +#endif +#if (defined(VK_NV_ray_tracing)) + PFN_vkCmdCopyAccelerationStructureNV fp_vkCmdCopyAccelerationStructureNV = nullptr; +#else + void * fp_vkCmdCopyAccelerationStructureNV{}; +#endif +#if (defined(VK_KHR_acceleration_structure)) + PFN_vkCmdCopyAccelerationStructureKHR fp_vkCmdCopyAccelerationStructureKHR = nullptr; +#else + void * fp_vkCmdCopyAccelerationStructureKHR{}; +#endif +#if (defined(VK_KHR_acceleration_structure)) + PFN_vkCopyAccelerationStructureKHR fp_vkCopyAccelerationStructureKHR = nullptr; +#else + void * 
fp_vkCopyAccelerationStructureKHR{}; +#endif +#if (defined(VK_KHR_acceleration_structure)) + PFN_vkCmdCopyAccelerationStructureToMemoryKHR fp_vkCmdCopyAccelerationStructureToMemoryKHR = nullptr; +#else + void * fp_vkCmdCopyAccelerationStructureToMemoryKHR{}; +#endif +#if (defined(VK_KHR_acceleration_structure)) + PFN_vkCopyAccelerationStructureToMemoryKHR fp_vkCopyAccelerationStructureToMemoryKHR = nullptr; +#else + void * fp_vkCopyAccelerationStructureToMemoryKHR{}; +#endif +#if (defined(VK_KHR_acceleration_structure)) + PFN_vkCmdCopyMemoryToAccelerationStructureKHR fp_vkCmdCopyMemoryToAccelerationStructureKHR = nullptr; +#else + void * fp_vkCmdCopyMemoryToAccelerationStructureKHR{}; +#endif +#if (defined(VK_KHR_acceleration_structure)) + PFN_vkCopyMemoryToAccelerationStructureKHR fp_vkCopyMemoryToAccelerationStructureKHR = nullptr; +#else + void * fp_vkCopyMemoryToAccelerationStructureKHR{}; +#endif +#if (defined(VK_KHR_acceleration_structure)) + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR fp_vkCmdWriteAccelerationStructuresPropertiesKHR = nullptr; +#else + void * fp_vkCmdWriteAccelerationStructuresPropertiesKHR{}; +#endif +#if (defined(VK_NV_ray_tracing)) + PFN_vkCmdWriteAccelerationStructuresPropertiesNV fp_vkCmdWriteAccelerationStructuresPropertiesNV = nullptr; +#else + void * fp_vkCmdWriteAccelerationStructuresPropertiesNV{}; +#endif +#if (defined(VK_NV_ray_tracing)) + PFN_vkCmdBuildAccelerationStructureNV fp_vkCmdBuildAccelerationStructureNV = nullptr; +#else + void * fp_vkCmdBuildAccelerationStructureNV{}; +#endif +#if (defined(VK_KHR_acceleration_structure)) + PFN_vkWriteAccelerationStructuresPropertiesKHR fp_vkWriteAccelerationStructuresPropertiesKHR = nullptr; +#else + void * fp_vkWriteAccelerationStructuresPropertiesKHR{}; +#endif +#if (defined(VK_KHR_ray_tracing_pipeline)) + PFN_vkCmdTraceRaysKHR fp_vkCmdTraceRaysKHR = nullptr; +#else + void * fp_vkCmdTraceRaysKHR{}; +#endif +#if (defined(VK_NV_ray_tracing)) + PFN_vkCmdTraceRaysNV fp_vkCmdTraceRaysNV = nullptr; +#else + void * fp_vkCmdTraceRaysNV{}; +#endif +#if (defined(VK_KHR_ray_tracing_pipeline)) + PFN_vkGetRayTracingShaderGroupHandlesKHR fp_vkGetRayTracingShaderGroupHandlesKHR = nullptr; +#else + void * fp_vkGetRayTracingShaderGroupHandlesKHR{}; +#endif +#if (defined(VK_KHR_ray_tracing_pipeline)) + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR fp_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = nullptr; +#else + void * fp_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR{}; +#endif +#if (defined(VK_NV_ray_tracing)) + PFN_vkGetAccelerationStructureHandleNV fp_vkGetAccelerationStructureHandleNV = nullptr; +#else + void * fp_vkGetAccelerationStructureHandleNV{}; +#endif +#if (defined(VK_NV_ray_tracing)) + PFN_vkCreateRayTracingPipelinesNV fp_vkCreateRayTracingPipelinesNV = nullptr; +#else + void * fp_vkCreateRayTracingPipelinesNV{}; +#endif +#if (defined(VK_KHR_ray_tracing_pipeline)) + PFN_vkCreateRayTracingPipelinesKHR fp_vkCreateRayTracingPipelinesKHR = nullptr; +#else + void * fp_vkCreateRayTracingPipelinesKHR{}; +#endif +#if (defined(VK_KHR_ray_tracing_pipeline)) + PFN_vkCmdTraceRaysIndirectKHR fp_vkCmdTraceRaysIndirectKHR = nullptr; +#else + void * fp_vkCmdTraceRaysIndirectKHR{}; +#endif +#if (defined(VK_KHR_ray_tracing_maintenance1)) + PFN_vkCmdTraceRaysIndirect2KHR fp_vkCmdTraceRaysIndirect2KHR = nullptr; +#else + void * fp_vkCmdTraceRaysIndirect2KHR{}; +#endif +#if (defined(VK_KHR_acceleration_structure)) + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR 
fp_vkGetDeviceAccelerationStructureCompatibilityKHR = nullptr; +#else + void * fp_vkGetDeviceAccelerationStructureCompatibilityKHR{}; +#endif +#if (defined(VK_KHR_ray_tracing_pipeline)) + PFN_vkGetRayTracingShaderGroupStackSizeKHR fp_vkGetRayTracingShaderGroupStackSizeKHR = nullptr; +#else + void * fp_vkGetRayTracingShaderGroupStackSizeKHR{}; +#endif +#if (defined(VK_KHR_ray_tracing_pipeline)) + PFN_vkCmdSetRayTracingPipelineStackSizeKHR fp_vkCmdSetRayTracingPipelineStackSizeKHR = nullptr; +#else + void * fp_vkCmdSetRayTracingPipelineStackSizeKHR{}; +#endif +#if (defined(VK_EXT_full_screen_exclusive)) + PFN_vkGetDeviceGroupSurfacePresentModes2EXT fp_vkGetDeviceGroupSurfacePresentModes2EXT = nullptr; +#else + void * fp_vkGetDeviceGroupSurfacePresentModes2EXT{}; +#endif +#if (defined(VK_EXT_full_screen_exclusive)) + PFN_vkAcquireFullScreenExclusiveModeEXT fp_vkAcquireFullScreenExclusiveModeEXT = nullptr; +#else + void * fp_vkAcquireFullScreenExclusiveModeEXT{}; +#endif +#if (defined(VK_EXT_full_screen_exclusive)) + PFN_vkReleaseFullScreenExclusiveModeEXT fp_vkReleaseFullScreenExclusiveModeEXT = nullptr; +#else + void * fp_vkReleaseFullScreenExclusiveModeEXT{}; +#endif +#if (defined(VK_KHR_performance_query)) + PFN_vkAcquireProfilingLockKHR fp_vkAcquireProfilingLockKHR = nullptr; +#else + void * fp_vkAcquireProfilingLockKHR{}; +#endif +#if (defined(VK_KHR_performance_query)) + PFN_vkReleaseProfilingLockKHR fp_vkReleaseProfilingLockKHR = nullptr; +#else + void * fp_vkReleaseProfilingLockKHR{}; +#endif +#if (defined(VK_EXT_image_drm_format_modifier)) + PFN_vkGetImageDrmFormatModifierPropertiesEXT fp_vkGetImageDrmFormatModifierPropertiesEXT = nullptr; +#else + void * fp_vkGetImageDrmFormatModifierPropertiesEXT{}; +#endif +#if (defined(VK_VERSION_1_2)) + PFN_vkGetBufferOpaqueCaptureAddress fp_vkGetBufferOpaqueCaptureAddress = nullptr; +#else + void * fp_vkGetBufferOpaqueCaptureAddress{}; +#endif +#if (defined(VK_VERSION_1_2)) + PFN_vkGetBufferDeviceAddress fp_vkGetBufferDeviceAddress = nullptr; +#else + void * fp_vkGetBufferDeviceAddress{}; +#endif +#if (defined(VK_INTEL_performance_query)) + PFN_vkInitializePerformanceApiINTEL fp_vkInitializePerformanceApiINTEL = nullptr; +#else + void * fp_vkInitializePerformanceApiINTEL{}; +#endif +#if (defined(VK_INTEL_performance_query)) + PFN_vkUninitializePerformanceApiINTEL fp_vkUninitializePerformanceApiINTEL = nullptr; +#else + void * fp_vkUninitializePerformanceApiINTEL{}; +#endif +#if (defined(VK_INTEL_performance_query)) + PFN_vkCmdSetPerformanceMarkerINTEL fp_vkCmdSetPerformanceMarkerINTEL = nullptr; +#else + void * fp_vkCmdSetPerformanceMarkerINTEL{}; +#endif +#if (defined(VK_INTEL_performance_query)) + PFN_vkCmdSetPerformanceStreamMarkerINTEL fp_vkCmdSetPerformanceStreamMarkerINTEL = nullptr; +#else + void * fp_vkCmdSetPerformanceStreamMarkerINTEL{}; +#endif +#if (defined(VK_INTEL_performance_query)) + PFN_vkCmdSetPerformanceOverrideINTEL fp_vkCmdSetPerformanceOverrideINTEL = nullptr; +#else + void * fp_vkCmdSetPerformanceOverrideINTEL{}; +#endif +#if (defined(VK_INTEL_performance_query)) + PFN_vkAcquirePerformanceConfigurationINTEL fp_vkAcquirePerformanceConfigurationINTEL = nullptr; +#else + void * fp_vkAcquirePerformanceConfigurationINTEL{}; +#endif +#if (defined(VK_INTEL_performance_query)) + PFN_vkReleasePerformanceConfigurationINTEL fp_vkReleasePerformanceConfigurationINTEL = nullptr; +#else + void * fp_vkReleasePerformanceConfigurationINTEL{}; +#endif +#if (defined(VK_INTEL_performance_query)) + PFN_vkQueueSetPerformanceConfigurationINTEL 
fp_vkQueueSetPerformanceConfigurationINTEL = nullptr; +#else + void * fp_vkQueueSetPerformanceConfigurationINTEL{}; +#endif +#if (defined(VK_INTEL_performance_query)) + PFN_vkGetPerformanceParameterINTEL fp_vkGetPerformanceParameterINTEL = nullptr; +#else + void * fp_vkGetPerformanceParameterINTEL{}; +#endif +#if (defined(VK_VERSION_1_2)) + PFN_vkGetDeviceMemoryOpaqueCaptureAddress fp_vkGetDeviceMemoryOpaqueCaptureAddress = nullptr; +#else + void * fp_vkGetDeviceMemoryOpaqueCaptureAddress{}; +#endif +#if (defined(VK_KHR_pipeline_executable_properties)) + PFN_vkGetPipelineExecutablePropertiesKHR fp_vkGetPipelineExecutablePropertiesKHR = nullptr; +#else + void * fp_vkGetPipelineExecutablePropertiesKHR{}; +#endif +#if (defined(VK_KHR_pipeline_executable_properties)) + PFN_vkGetPipelineExecutableStatisticsKHR fp_vkGetPipelineExecutableStatisticsKHR = nullptr; +#else + void * fp_vkGetPipelineExecutableStatisticsKHR{}; +#endif +#if (defined(VK_KHR_pipeline_executable_properties)) + PFN_vkGetPipelineExecutableInternalRepresentationsKHR fp_vkGetPipelineExecutableInternalRepresentationsKHR = nullptr; +#else + void * fp_vkGetPipelineExecutableInternalRepresentationsKHR{}; +#endif +#if (defined(VK_KHR_line_rasterization)) + PFN_vkCmdSetLineStippleKHR fp_vkCmdSetLineStippleKHR = nullptr; +#else + void * fp_vkCmdSetLineStippleKHR{}; +#endif +#if (defined(VKSC_VERSION_1_0)) + PFN_vkGetFaultData fp_vkGetFaultData = nullptr; +#else + void * fp_vkGetFaultData{}; +#endif +#if (defined(VK_KHR_acceleration_structure)) + PFN_vkCreateAccelerationStructureKHR fp_vkCreateAccelerationStructureKHR = nullptr; +#else + void * fp_vkCreateAccelerationStructureKHR{}; +#endif +#if (defined(VK_KHR_acceleration_structure)) + PFN_vkCmdBuildAccelerationStructuresKHR fp_vkCmdBuildAccelerationStructuresKHR = nullptr; +#else + void * fp_vkCmdBuildAccelerationStructuresKHR{}; +#endif +#if (defined(VK_KHR_acceleration_structure)) + PFN_vkCmdBuildAccelerationStructuresIndirectKHR fp_vkCmdBuildAccelerationStructuresIndirectKHR = nullptr; +#else + void * fp_vkCmdBuildAccelerationStructuresIndirectKHR{}; +#endif +#if (defined(VK_KHR_acceleration_structure)) + PFN_vkBuildAccelerationStructuresKHR fp_vkBuildAccelerationStructuresKHR = nullptr; +#else + void * fp_vkBuildAccelerationStructuresKHR{}; +#endif +#if (defined(VK_KHR_acceleration_structure)) + PFN_vkGetAccelerationStructureDeviceAddressKHR fp_vkGetAccelerationStructureDeviceAddressKHR = nullptr; +#else + void * fp_vkGetAccelerationStructureDeviceAddressKHR{}; +#endif +#if (defined(VK_KHR_deferred_host_operations)) + PFN_vkCreateDeferredOperationKHR fp_vkCreateDeferredOperationKHR = nullptr; +#else + void * fp_vkCreateDeferredOperationKHR{}; +#endif +#if (defined(VK_KHR_deferred_host_operations)) + PFN_vkDestroyDeferredOperationKHR fp_vkDestroyDeferredOperationKHR = nullptr; +#else + void * fp_vkDestroyDeferredOperationKHR{}; +#endif +#if (defined(VK_KHR_deferred_host_operations)) + PFN_vkGetDeferredOperationMaxConcurrencyKHR fp_vkGetDeferredOperationMaxConcurrencyKHR = nullptr; +#else + void * fp_vkGetDeferredOperationMaxConcurrencyKHR{}; +#endif +#if (defined(VK_KHR_deferred_host_operations)) + PFN_vkGetDeferredOperationResultKHR fp_vkGetDeferredOperationResultKHR = nullptr; +#else + void * fp_vkGetDeferredOperationResultKHR{}; +#endif +#if (defined(VK_KHR_deferred_host_operations)) + PFN_vkDeferredOperationJoinKHR fp_vkDeferredOperationJoinKHR = nullptr; +#else + void * fp_vkDeferredOperationJoinKHR{}; +#endif +#if (defined(VK_NV_device_generated_commands_compute)) + 
PFN_vkGetPipelineIndirectMemoryRequirementsNV fp_vkGetPipelineIndirectMemoryRequirementsNV = nullptr; +#else + void * fp_vkGetPipelineIndirectMemoryRequirementsNV{}; +#endif +#if (defined(VK_NV_device_generated_commands_compute)) + PFN_vkGetPipelineIndirectDeviceAddressNV fp_vkGetPipelineIndirectDeviceAddressNV = nullptr; +#else + void * fp_vkGetPipelineIndirectDeviceAddressNV{}; +#endif +#if (defined(VK_AMD_anti_lag)) + PFN_vkAntiLagUpdateAMD fp_vkAntiLagUpdateAMD = nullptr; +#else + void * fp_vkAntiLagUpdateAMD{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdSetCullMode fp_vkCmdSetCullMode = nullptr; +#else + void * fp_vkCmdSetCullMode{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdSetFrontFace fp_vkCmdSetFrontFace = nullptr; +#else + void * fp_vkCmdSetFrontFace{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdSetPrimitiveTopology fp_vkCmdSetPrimitiveTopology = nullptr; +#else + void * fp_vkCmdSetPrimitiveTopology{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdSetViewportWithCount fp_vkCmdSetViewportWithCount = nullptr; +#else + void * fp_vkCmdSetViewportWithCount{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdSetScissorWithCount fp_vkCmdSetScissorWithCount = nullptr; +#else + void * fp_vkCmdSetScissorWithCount{}; +#endif +#if (defined(VK_KHR_maintenance5)) + PFN_vkCmdBindIndexBuffer2KHR fp_vkCmdBindIndexBuffer2KHR = nullptr; +#else + void * fp_vkCmdBindIndexBuffer2KHR{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdBindVertexBuffers2 fp_vkCmdBindVertexBuffers2 = nullptr; +#else + void * fp_vkCmdBindVertexBuffers2{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdSetDepthTestEnable fp_vkCmdSetDepthTestEnable = nullptr; +#else + void * fp_vkCmdSetDepthTestEnable{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdSetDepthWriteEnable fp_vkCmdSetDepthWriteEnable = nullptr; +#else + void * fp_vkCmdSetDepthWriteEnable{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdSetDepthCompareOp fp_vkCmdSetDepthCompareOp = nullptr; +#else + void * fp_vkCmdSetDepthCompareOp{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdSetDepthBoundsTestEnable fp_vkCmdSetDepthBoundsTestEnable = nullptr; +#else + void * fp_vkCmdSetDepthBoundsTestEnable{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdSetStencilTestEnable fp_vkCmdSetStencilTestEnable = nullptr; +#else + void * fp_vkCmdSetStencilTestEnable{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdSetStencilOp fp_vkCmdSetStencilOp = nullptr; +#else + void * fp_vkCmdSetStencilOp{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetPatchControlPointsEXT fp_vkCmdSetPatchControlPointsEXT = nullptr; +#else + void * fp_vkCmdSetPatchControlPointsEXT{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdSetRasterizerDiscardEnable fp_vkCmdSetRasterizerDiscardEnable = nullptr; +#else + void * fp_vkCmdSetRasterizerDiscardEnable{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdSetDepthBiasEnable fp_vkCmdSetDepthBiasEnable = nullptr; +#else + void * fp_vkCmdSetDepthBiasEnable{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetLogicOpEXT fp_vkCmdSetLogicOpEXT = nullptr; +#else + void * fp_vkCmdSetLogicOpEXT{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdSetPrimitiveRestartEnable fp_vkCmdSetPrimitiveRestartEnable = nullptr; +#else + void * fp_vkCmdSetPrimitiveRestartEnable{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetTessellationDomainOriginEXT 
fp_vkCmdSetTessellationDomainOriginEXT = nullptr; +#else + void * fp_vkCmdSetTessellationDomainOriginEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetDepthClampEnableEXT fp_vkCmdSetDepthClampEnableEXT = nullptr; +#else + void * fp_vkCmdSetDepthClampEnableEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetPolygonModeEXT fp_vkCmdSetPolygonModeEXT = nullptr; +#else + void * fp_vkCmdSetPolygonModeEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetRasterizationSamplesEXT fp_vkCmdSetRasterizationSamplesEXT = nullptr; +#else + void * fp_vkCmdSetRasterizationSamplesEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetSampleMaskEXT fp_vkCmdSetSampleMaskEXT = nullptr; +#else + void * fp_vkCmdSetSampleMaskEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetAlphaToCoverageEnableEXT fp_vkCmdSetAlphaToCoverageEnableEXT = nullptr; +#else + void * fp_vkCmdSetAlphaToCoverageEnableEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetAlphaToOneEnableEXT fp_vkCmdSetAlphaToOneEnableEXT = nullptr; +#else + void * fp_vkCmdSetAlphaToOneEnableEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetLogicOpEnableEXT fp_vkCmdSetLogicOpEnableEXT = nullptr; +#else + void * fp_vkCmdSetLogicOpEnableEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetColorBlendEnableEXT fp_vkCmdSetColorBlendEnableEXT = nullptr; +#else + void * fp_vkCmdSetColorBlendEnableEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetColorBlendEquationEXT fp_vkCmdSetColorBlendEquationEXT = nullptr; +#else + void * fp_vkCmdSetColorBlendEquationEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetColorWriteMaskEXT fp_vkCmdSetColorWriteMaskEXT = nullptr; +#else + void * fp_vkCmdSetColorWriteMaskEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetRasterizationStreamEXT fp_vkCmdSetRasterizationStreamEXT = nullptr; +#else + void * fp_vkCmdSetRasterizationStreamEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetConservativeRasterizationModeEXT fp_vkCmdSetConservativeRasterizationModeEXT = nullptr; +#else + void * fp_vkCmdSetConservativeRasterizationModeEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT fp_vkCmdSetExtraPrimitiveOverestimationSizeEXT = nullptr; +#else + void * fp_vkCmdSetExtraPrimitiveOverestimationSizeEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetDepthClipEnableEXT fp_vkCmdSetDepthClipEnableEXT = nullptr; +#else + void * fp_vkCmdSetDepthClipEnableEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetSampleLocationsEnableEXT fp_vkCmdSetSampleLocationsEnableEXT = nullptr; +#else + void * fp_vkCmdSetSampleLocationsEnableEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + 
PFN_vkCmdSetColorBlendAdvancedEXT fp_vkCmdSetColorBlendAdvancedEXT = nullptr; +#else + void * fp_vkCmdSetColorBlendAdvancedEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetProvokingVertexModeEXT fp_vkCmdSetProvokingVertexModeEXT = nullptr; +#else + void * fp_vkCmdSetProvokingVertexModeEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetLineRasterizationModeEXT fp_vkCmdSetLineRasterizationModeEXT = nullptr; +#else + void * fp_vkCmdSetLineRasterizationModeEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetLineStippleEnableEXT fp_vkCmdSetLineStippleEnableEXT = nullptr; +#else + void * fp_vkCmdSetLineStippleEnableEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetDepthClipNegativeOneToOneEXT fp_vkCmdSetDepthClipNegativeOneToOneEXT = nullptr; +#else + void * fp_vkCmdSetDepthClipNegativeOneToOneEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetViewportWScalingEnableNV fp_vkCmdSetViewportWScalingEnableNV = nullptr; +#else + void * fp_vkCmdSetViewportWScalingEnableNV{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetViewportSwizzleNV fp_vkCmdSetViewportSwizzleNV = nullptr; +#else + void * fp_vkCmdSetViewportSwizzleNV{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetCoverageToColorEnableNV fp_vkCmdSetCoverageToColorEnableNV = nullptr; +#else + void * fp_vkCmdSetCoverageToColorEnableNV{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetCoverageToColorLocationNV fp_vkCmdSetCoverageToColorLocationNV = nullptr; +#else + void * fp_vkCmdSetCoverageToColorLocationNV{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetCoverageModulationModeNV fp_vkCmdSetCoverageModulationModeNV = nullptr; +#else + void * fp_vkCmdSetCoverageModulationModeNV{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetCoverageModulationTableEnableNV fp_vkCmdSetCoverageModulationTableEnableNV = nullptr; +#else + void * fp_vkCmdSetCoverageModulationTableEnableNV{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetCoverageModulationTableNV fp_vkCmdSetCoverageModulationTableNV = nullptr; +#else + void * fp_vkCmdSetCoverageModulationTableNV{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetShadingRateImageEnableNV fp_vkCmdSetShadingRateImageEnableNV = nullptr; +#else + void * fp_vkCmdSetShadingRateImageEnableNV{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetCoverageReductionModeNV fp_vkCmdSetCoverageReductionModeNV = nullptr; +#else + void * fp_vkCmdSetCoverageReductionModeNV{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetRepresentativeFragmentTestEnableNV fp_vkCmdSetRepresentativeFragmentTestEnableNV = nullptr; +#else + void * fp_vkCmdSetRepresentativeFragmentTestEnableNV{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCreatePrivateDataSlot fp_vkCreatePrivateDataSlot = nullptr; +#else + void * fp_vkCreatePrivateDataSlot{}; +#endif +#if 
(defined(VK_VERSION_1_3)) + PFN_vkDestroyPrivateDataSlot fp_vkDestroyPrivateDataSlot = nullptr; +#else + void * fp_vkDestroyPrivateDataSlot{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkSetPrivateData fp_vkSetPrivateData = nullptr; +#else + void * fp_vkSetPrivateData{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkGetPrivateData fp_vkGetPrivateData = nullptr; +#else + void * fp_vkGetPrivateData{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdCopyBuffer2 fp_vkCmdCopyBuffer2 = nullptr; +#else + void * fp_vkCmdCopyBuffer2{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdCopyImage2 fp_vkCmdCopyImage2 = nullptr; +#else + void * fp_vkCmdCopyImage2{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdBlitImage2 fp_vkCmdBlitImage2 = nullptr; +#else + void * fp_vkCmdBlitImage2{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdCopyBufferToImage2 fp_vkCmdCopyBufferToImage2 = nullptr; +#else + void * fp_vkCmdCopyBufferToImage2{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdCopyImageToBuffer2 fp_vkCmdCopyImageToBuffer2 = nullptr; +#else + void * fp_vkCmdCopyImageToBuffer2{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdResolveImage2 fp_vkCmdResolveImage2 = nullptr; +#else + void * fp_vkCmdResolveImage2{}; +#endif +#if (defined(VK_KHR_object_refresh)) + PFN_vkCmdRefreshObjectsKHR fp_vkCmdRefreshObjectsKHR = nullptr; +#else + void * fp_vkCmdRefreshObjectsKHR{}; +#endif +#if (defined(VK_KHR_fragment_shading_rate)) + PFN_vkCmdSetFragmentShadingRateKHR fp_vkCmdSetFragmentShadingRateKHR = nullptr; +#else + void * fp_vkCmdSetFragmentShadingRateKHR{}; +#endif +#if (defined(VK_NV_fragment_shading_rate_enums)) + PFN_vkCmdSetFragmentShadingRateEnumNV fp_vkCmdSetFragmentShadingRateEnumNV = nullptr; +#else + void * fp_vkCmdSetFragmentShadingRateEnumNV{}; +#endif +#if (defined(VK_KHR_acceleration_structure)) + PFN_vkGetAccelerationStructureBuildSizesKHR fp_vkGetAccelerationStructureBuildSizesKHR = nullptr; +#else + void * fp_vkGetAccelerationStructureBuildSizesKHR{}; +#endif +#if (defined(VK_EXT_vertex_input_dynamic_state)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetVertexInputEXT fp_vkCmdSetVertexInputEXT = nullptr; +#else + void * fp_vkCmdSetVertexInputEXT{}; +#endif +#if (defined(VK_EXT_color_write_enable)) + PFN_vkCmdSetColorWriteEnableEXT fp_vkCmdSetColorWriteEnableEXT = nullptr; +#else + void * fp_vkCmdSetColorWriteEnableEXT{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdSetEvent2 fp_vkCmdSetEvent2 = nullptr; +#else + void * fp_vkCmdSetEvent2{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdResetEvent2 fp_vkCmdResetEvent2 = nullptr; +#else + void * fp_vkCmdResetEvent2{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdWaitEvents2 fp_vkCmdWaitEvents2 = nullptr; +#else + void * fp_vkCmdWaitEvents2{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdPipelineBarrier2 fp_vkCmdPipelineBarrier2 = nullptr; +#else + void * fp_vkCmdPipelineBarrier2{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkQueueSubmit2 fp_vkQueueSubmit2 = nullptr; +#else + void * fp_vkQueueSubmit2{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdWriteTimestamp2 fp_vkCmdWriteTimestamp2 = nullptr; +#else + void * fp_vkCmdWriteTimestamp2{}; +#endif +#if (defined(VK_KHR_synchronization2)) + PFN_vkCmdWriteBufferMarker2AMD fp_vkCmdWriteBufferMarker2AMD = nullptr; +#else + void * fp_vkCmdWriteBufferMarker2AMD{}; +#endif +#if (defined(VK_KHR_synchronization2)) + PFN_vkGetQueueCheckpointData2NV fp_vkGetQueueCheckpointData2NV = nullptr; +#else + void * fp_vkGetQueueCheckpointData2NV{}; +#endif +#if 
(defined(VK_EXT_host_image_copy)) + PFN_vkCopyMemoryToImageEXT fp_vkCopyMemoryToImageEXT = nullptr; +#else + void * fp_vkCopyMemoryToImageEXT{}; +#endif +#if (defined(VK_EXT_host_image_copy)) + PFN_vkCopyImageToMemoryEXT fp_vkCopyImageToMemoryEXT = nullptr; +#else + void * fp_vkCopyImageToMemoryEXT{}; +#endif +#if (defined(VK_EXT_host_image_copy)) + PFN_vkCopyImageToImageEXT fp_vkCopyImageToImageEXT = nullptr; +#else + void * fp_vkCopyImageToImageEXT{}; +#endif +#if (defined(VK_EXT_host_image_copy)) + PFN_vkTransitionImageLayoutEXT fp_vkTransitionImageLayoutEXT = nullptr; +#else + void * fp_vkTransitionImageLayoutEXT{}; +#endif +#if (defined(VKSC_VERSION_1_0)) + PFN_vkGetCommandPoolMemoryConsumption fp_vkGetCommandPoolMemoryConsumption = nullptr; +#else + void * fp_vkGetCommandPoolMemoryConsumption{}; +#endif +#if (defined(VK_KHR_video_queue)) + PFN_vkCreateVideoSessionKHR fp_vkCreateVideoSessionKHR = nullptr; +#else + void * fp_vkCreateVideoSessionKHR{}; +#endif +#if (defined(VK_KHR_video_queue)) + PFN_vkDestroyVideoSessionKHR fp_vkDestroyVideoSessionKHR = nullptr; +#else + void * fp_vkDestroyVideoSessionKHR{}; +#endif +#if (defined(VK_KHR_video_queue)) + PFN_vkCreateVideoSessionParametersKHR fp_vkCreateVideoSessionParametersKHR = nullptr; +#else + void * fp_vkCreateVideoSessionParametersKHR{}; +#endif +#if (defined(VK_KHR_video_queue)) + PFN_vkUpdateVideoSessionParametersKHR fp_vkUpdateVideoSessionParametersKHR = nullptr; +#else + void * fp_vkUpdateVideoSessionParametersKHR{}; +#endif +#if (defined(VK_KHR_video_encode_queue)) + PFN_vkGetEncodedVideoSessionParametersKHR fp_vkGetEncodedVideoSessionParametersKHR = nullptr; +#else + void * fp_vkGetEncodedVideoSessionParametersKHR{}; +#endif +#if (defined(VK_KHR_video_queue)) + PFN_vkDestroyVideoSessionParametersKHR fp_vkDestroyVideoSessionParametersKHR = nullptr; +#else + void * fp_vkDestroyVideoSessionParametersKHR{}; +#endif +#if (defined(VK_KHR_video_queue)) + PFN_vkGetVideoSessionMemoryRequirementsKHR fp_vkGetVideoSessionMemoryRequirementsKHR = nullptr; +#else + void * fp_vkGetVideoSessionMemoryRequirementsKHR{}; +#endif +#if (defined(VK_KHR_video_queue)) + PFN_vkBindVideoSessionMemoryKHR fp_vkBindVideoSessionMemoryKHR = nullptr; +#else + void * fp_vkBindVideoSessionMemoryKHR{}; +#endif +#if (defined(VK_KHR_video_decode_queue)) + PFN_vkCmdDecodeVideoKHR fp_vkCmdDecodeVideoKHR = nullptr; +#else + void * fp_vkCmdDecodeVideoKHR{}; +#endif +#if (defined(VK_KHR_video_queue)) + PFN_vkCmdBeginVideoCodingKHR fp_vkCmdBeginVideoCodingKHR = nullptr; +#else + void * fp_vkCmdBeginVideoCodingKHR{}; +#endif +#if (defined(VK_KHR_video_queue)) + PFN_vkCmdControlVideoCodingKHR fp_vkCmdControlVideoCodingKHR = nullptr; +#else + void * fp_vkCmdControlVideoCodingKHR{}; +#endif +#if (defined(VK_KHR_video_queue)) + PFN_vkCmdEndVideoCodingKHR fp_vkCmdEndVideoCodingKHR = nullptr; +#else + void * fp_vkCmdEndVideoCodingKHR{}; +#endif +#if (defined(VK_KHR_video_encode_queue)) + PFN_vkCmdEncodeVideoKHR fp_vkCmdEncodeVideoKHR = nullptr; +#else + void * fp_vkCmdEncodeVideoKHR{}; +#endif +#if (defined(VK_NV_memory_decompression)) + PFN_vkCmdDecompressMemoryNV fp_vkCmdDecompressMemoryNV = nullptr; +#else + void * fp_vkCmdDecompressMemoryNV{}; +#endif +#if (defined(VK_NV_memory_decompression)) + PFN_vkCmdDecompressMemoryIndirectCountNV fp_vkCmdDecompressMemoryIndirectCountNV = nullptr; +#else + void * fp_vkCmdDecompressMemoryIndirectCountNV{}; +#endif +#if (defined(VK_EXT_descriptor_buffer)) + PFN_vkGetDescriptorSetLayoutSizeEXT fp_vkGetDescriptorSetLayoutSizeEXT = 
nullptr; +#else + void * fp_vkGetDescriptorSetLayoutSizeEXT{}; +#endif +#if (defined(VK_EXT_descriptor_buffer)) + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT fp_vkGetDescriptorSetLayoutBindingOffsetEXT = nullptr; +#else + void * fp_vkGetDescriptorSetLayoutBindingOffsetEXT{}; +#endif +#if (defined(VK_EXT_descriptor_buffer)) + PFN_vkGetDescriptorEXT fp_vkGetDescriptorEXT = nullptr; +#else + void * fp_vkGetDescriptorEXT{}; +#endif +#if (defined(VK_EXT_descriptor_buffer)) + PFN_vkCmdBindDescriptorBuffersEXT fp_vkCmdBindDescriptorBuffersEXT = nullptr; +#else + void * fp_vkCmdBindDescriptorBuffersEXT{}; +#endif +#if (defined(VK_EXT_descriptor_buffer)) + PFN_vkCmdSetDescriptorBufferOffsetsEXT fp_vkCmdSetDescriptorBufferOffsetsEXT = nullptr; +#else + void * fp_vkCmdSetDescriptorBufferOffsetsEXT{}; +#endif +#if (defined(VK_EXT_descriptor_buffer)) + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT fp_vkCmdBindDescriptorBufferEmbeddedSamplersEXT = nullptr; +#else + void * fp_vkCmdBindDescriptorBufferEmbeddedSamplersEXT{}; +#endif +#if (defined(VK_EXT_descriptor_buffer)) + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT fp_vkGetBufferOpaqueCaptureDescriptorDataEXT = nullptr; +#else + void * fp_vkGetBufferOpaqueCaptureDescriptorDataEXT{}; +#endif +#if (defined(VK_EXT_descriptor_buffer)) + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT fp_vkGetImageOpaqueCaptureDescriptorDataEXT = nullptr; +#else + void * fp_vkGetImageOpaqueCaptureDescriptorDataEXT{}; +#endif +#if (defined(VK_EXT_descriptor_buffer)) + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT fp_vkGetImageViewOpaqueCaptureDescriptorDataEXT = nullptr; +#else + void * fp_vkGetImageViewOpaqueCaptureDescriptorDataEXT{}; +#endif +#if (defined(VK_EXT_descriptor_buffer)) + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT fp_vkGetSamplerOpaqueCaptureDescriptorDataEXT = nullptr; +#else + void * fp_vkGetSamplerOpaqueCaptureDescriptorDataEXT{}; +#endif +#if (defined(VK_EXT_descriptor_buffer)) + PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT fp_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = nullptr; +#else + void * fp_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT{}; +#endif +#if (defined(VK_EXT_pageable_device_local_memory)) + PFN_vkSetDeviceMemoryPriorityEXT fp_vkSetDeviceMemoryPriorityEXT = nullptr; +#else + void * fp_vkSetDeviceMemoryPriorityEXT{}; +#endif +#if (defined(VK_KHR_present_wait)) + PFN_vkWaitForPresentKHR fp_vkWaitForPresentKHR = nullptr; +#else + void * fp_vkWaitForPresentKHR{}; +#endif +#if (defined(VK_FUCHSIA_buffer_collection)) + PFN_vkCreateBufferCollectionFUCHSIA fp_vkCreateBufferCollectionFUCHSIA = nullptr; +#else + void * fp_vkCreateBufferCollectionFUCHSIA{}; +#endif +#if (defined(VK_FUCHSIA_buffer_collection)) + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA fp_vkSetBufferCollectionBufferConstraintsFUCHSIA = nullptr; +#else + void * fp_vkSetBufferCollectionBufferConstraintsFUCHSIA{}; +#endif +#if (defined(VK_FUCHSIA_buffer_collection)) + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA fp_vkSetBufferCollectionImageConstraintsFUCHSIA = nullptr; +#else + void * fp_vkSetBufferCollectionImageConstraintsFUCHSIA{}; +#endif +#if (defined(VK_FUCHSIA_buffer_collection)) + PFN_vkDestroyBufferCollectionFUCHSIA fp_vkDestroyBufferCollectionFUCHSIA = nullptr; +#else + void * fp_vkDestroyBufferCollectionFUCHSIA{}; +#endif +#if (defined(VK_FUCHSIA_buffer_collection)) + PFN_vkGetBufferCollectionPropertiesFUCHSIA fp_vkGetBufferCollectionPropertiesFUCHSIA = nullptr; +#else + void * 
fp_vkGetBufferCollectionPropertiesFUCHSIA{}; +#endif +#if (defined(VK_NV_cuda_kernel_launch)) + PFN_vkCreateCudaModuleNV fp_vkCreateCudaModuleNV = nullptr; +#else + void * fp_vkCreateCudaModuleNV{}; +#endif +#if (defined(VK_NV_cuda_kernel_launch)) + PFN_vkGetCudaModuleCacheNV fp_vkGetCudaModuleCacheNV = nullptr; +#else + void * fp_vkGetCudaModuleCacheNV{}; +#endif +#if (defined(VK_NV_cuda_kernel_launch)) + PFN_vkCreateCudaFunctionNV fp_vkCreateCudaFunctionNV = nullptr; +#else + void * fp_vkCreateCudaFunctionNV{}; +#endif +#if (defined(VK_NV_cuda_kernel_launch)) + PFN_vkDestroyCudaModuleNV fp_vkDestroyCudaModuleNV = nullptr; +#else + void * fp_vkDestroyCudaModuleNV{}; +#endif +#if (defined(VK_NV_cuda_kernel_launch)) + PFN_vkDestroyCudaFunctionNV fp_vkDestroyCudaFunctionNV = nullptr; +#else + void * fp_vkDestroyCudaFunctionNV{}; +#endif +#if (defined(VK_NV_cuda_kernel_launch)) + PFN_vkCmdCudaLaunchKernelNV fp_vkCmdCudaLaunchKernelNV = nullptr; +#else + void * fp_vkCmdCudaLaunchKernelNV{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdBeginRendering fp_vkCmdBeginRendering = nullptr; +#else + void * fp_vkCmdBeginRendering{}; +#endif +#if (defined(VK_VERSION_1_3)) + PFN_vkCmdEndRendering fp_vkCmdEndRendering = nullptr; +#else + void * fp_vkCmdEndRendering{}; +#endif +#if (defined(VK_VALVE_descriptor_set_host_mapping)) + PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE fp_vkGetDescriptorSetLayoutHostMappingInfoVALVE = nullptr; +#else + void * fp_vkGetDescriptorSetLayoutHostMappingInfoVALVE{}; +#endif +#if (defined(VK_VALVE_descriptor_set_host_mapping)) + PFN_vkGetDescriptorSetHostMappingVALVE fp_vkGetDescriptorSetHostMappingVALVE = nullptr; +#else + void * fp_vkGetDescriptorSetHostMappingVALVE{}; +#endif +#if (defined(VK_EXT_opacity_micromap)) + PFN_vkCreateMicromapEXT fp_vkCreateMicromapEXT = nullptr; +#else + void * fp_vkCreateMicromapEXT{}; +#endif +#if (defined(VK_EXT_opacity_micromap)) + PFN_vkCmdBuildMicromapsEXT fp_vkCmdBuildMicromapsEXT = nullptr; +#else + void * fp_vkCmdBuildMicromapsEXT{}; +#endif +#if (defined(VK_EXT_opacity_micromap)) + PFN_vkBuildMicromapsEXT fp_vkBuildMicromapsEXT = nullptr; +#else + void * fp_vkBuildMicromapsEXT{}; +#endif +#if (defined(VK_EXT_opacity_micromap)) + PFN_vkDestroyMicromapEXT fp_vkDestroyMicromapEXT = nullptr; +#else + void * fp_vkDestroyMicromapEXT{}; +#endif +#if (defined(VK_EXT_opacity_micromap)) + PFN_vkCmdCopyMicromapEXT fp_vkCmdCopyMicromapEXT = nullptr; +#else + void * fp_vkCmdCopyMicromapEXT{}; +#endif +#if (defined(VK_EXT_opacity_micromap)) + PFN_vkCopyMicromapEXT fp_vkCopyMicromapEXT = nullptr; +#else + void * fp_vkCopyMicromapEXT{}; +#endif +#if (defined(VK_EXT_opacity_micromap)) + PFN_vkCmdCopyMicromapToMemoryEXT fp_vkCmdCopyMicromapToMemoryEXT = nullptr; +#else + void * fp_vkCmdCopyMicromapToMemoryEXT{}; +#endif +#if (defined(VK_EXT_opacity_micromap)) + PFN_vkCopyMicromapToMemoryEXT fp_vkCopyMicromapToMemoryEXT = nullptr; +#else + void * fp_vkCopyMicromapToMemoryEXT{}; +#endif +#if (defined(VK_EXT_opacity_micromap)) + PFN_vkCmdCopyMemoryToMicromapEXT fp_vkCmdCopyMemoryToMicromapEXT = nullptr; +#else + void * fp_vkCmdCopyMemoryToMicromapEXT{}; +#endif +#if (defined(VK_EXT_opacity_micromap)) + PFN_vkCopyMemoryToMicromapEXT fp_vkCopyMemoryToMicromapEXT = nullptr; +#else + void * fp_vkCopyMemoryToMicromapEXT{}; +#endif +#if (defined(VK_EXT_opacity_micromap)) + PFN_vkCmdWriteMicromapsPropertiesEXT fp_vkCmdWriteMicromapsPropertiesEXT = nullptr; +#else + void * fp_vkCmdWriteMicromapsPropertiesEXT{}; +#endif +#if 
(defined(VK_EXT_opacity_micromap)) + PFN_vkWriteMicromapsPropertiesEXT fp_vkWriteMicromapsPropertiesEXT = nullptr; +#else + void * fp_vkWriteMicromapsPropertiesEXT{}; +#endif +#if (defined(VK_EXT_opacity_micromap)) + PFN_vkGetDeviceMicromapCompatibilityEXT fp_vkGetDeviceMicromapCompatibilityEXT = nullptr; +#else + void * fp_vkGetDeviceMicromapCompatibilityEXT{}; +#endif +#if (defined(VK_EXT_opacity_micromap)) + PFN_vkGetMicromapBuildSizesEXT fp_vkGetMicromapBuildSizesEXT = nullptr; +#else + void * fp_vkGetMicromapBuildSizesEXT{}; +#endif +#if (defined(VK_EXT_shader_module_identifier)) + PFN_vkGetShaderModuleIdentifierEXT fp_vkGetShaderModuleIdentifierEXT = nullptr; +#else + void * fp_vkGetShaderModuleIdentifierEXT{}; +#endif +#if (defined(VK_EXT_shader_module_identifier)) + PFN_vkGetShaderModuleCreateInfoIdentifierEXT fp_vkGetShaderModuleCreateInfoIdentifierEXT = nullptr; +#else + void * fp_vkGetShaderModuleCreateInfoIdentifierEXT{}; +#endif +#if (defined(VK_KHR_maintenance5)) + PFN_vkGetImageSubresourceLayout2KHR fp_vkGetImageSubresourceLayout2KHR = nullptr; +#else + void * fp_vkGetImageSubresourceLayout2KHR{}; +#endif +#if (defined(VK_EXT_pipeline_properties)) + PFN_vkGetPipelinePropertiesEXT fp_vkGetPipelinePropertiesEXT = nullptr; +#else + void * fp_vkGetPipelinePropertiesEXT{}; +#endif +#if (defined(VK_EXT_metal_objects)) + PFN_vkExportMetalObjectsEXT fp_vkExportMetalObjectsEXT = nullptr; +#else + void * fp_vkExportMetalObjectsEXT{}; +#endif +#if (defined(VK_QCOM_tile_properties)) + PFN_vkGetFramebufferTilePropertiesQCOM fp_vkGetFramebufferTilePropertiesQCOM = nullptr; +#else + void * fp_vkGetFramebufferTilePropertiesQCOM{}; +#endif +#if (defined(VK_QCOM_tile_properties)) + PFN_vkGetDynamicRenderingTilePropertiesQCOM fp_vkGetDynamicRenderingTilePropertiesQCOM = nullptr; +#else + void * fp_vkGetDynamicRenderingTilePropertiesQCOM{}; +#endif +#if (defined(VK_NV_optical_flow)) + PFN_vkCreateOpticalFlowSessionNV fp_vkCreateOpticalFlowSessionNV = nullptr; +#else + void * fp_vkCreateOpticalFlowSessionNV{}; +#endif +#if (defined(VK_NV_optical_flow)) + PFN_vkDestroyOpticalFlowSessionNV fp_vkDestroyOpticalFlowSessionNV = nullptr; +#else + void * fp_vkDestroyOpticalFlowSessionNV{}; +#endif +#if (defined(VK_NV_optical_flow)) + PFN_vkBindOpticalFlowSessionImageNV fp_vkBindOpticalFlowSessionImageNV = nullptr; +#else + void * fp_vkBindOpticalFlowSessionImageNV{}; +#endif +#if (defined(VK_NV_optical_flow)) + PFN_vkCmdOpticalFlowExecuteNV fp_vkCmdOpticalFlowExecuteNV = nullptr; +#else + void * fp_vkCmdOpticalFlowExecuteNV{}; +#endif +#if (defined(VK_EXT_device_fault)) + PFN_vkGetDeviceFaultInfoEXT fp_vkGetDeviceFaultInfoEXT = nullptr; +#else + void * fp_vkGetDeviceFaultInfoEXT{}; +#endif +#if (defined(VK_EXT_depth_bias_control)) + PFN_vkCmdSetDepthBias2EXT fp_vkCmdSetDepthBias2EXT = nullptr; +#else + void * fp_vkCmdSetDepthBias2EXT{}; +#endif +#if (defined(VK_EXT_swapchain_maintenance1)) + PFN_vkReleaseSwapchainImagesEXT fp_vkReleaseSwapchainImagesEXT = nullptr; +#else + void * fp_vkReleaseSwapchainImagesEXT{}; +#endif +#if (defined(VK_KHR_maintenance5)) + PFN_vkGetDeviceImageSubresourceLayoutKHR fp_vkGetDeviceImageSubresourceLayoutKHR = nullptr; +#else + void * fp_vkGetDeviceImageSubresourceLayoutKHR{}; +#endif +#if (defined(VK_KHR_map_memory2)) + PFN_vkMapMemory2KHR fp_vkMapMemory2KHR = nullptr; +#else + void * fp_vkMapMemory2KHR{}; +#endif +#if (defined(VK_KHR_map_memory2)) + PFN_vkUnmapMemory2KHR fp_vkUnmapMemory2KHR = nullptr; +#else + void * fp_vkUnmapMemory2KHR{}; +#endif +#if 
(defined(VK_EXT_shader_object)) + PFN_vkCreateShadersEXT fp_vkCreateShadersEXT = nullptr; +#else + void * fp_vkCreateShadersEXT{}; +#endif +#if (defined(VK_EXT_shader_object)) + PFN_vkDestroyShaderEXT fp_vkDestroyShaderEXT = nullptr; +#else + void * fp_vkDestroyShaderEXT{}; +#endif +#if (defined(VK_EXT_shader_object)) + PFN_vkGetShaderBinaryDataEXT fp_vkGetShaderBinaryDataEXT = nullptr; +#else + void * fp_vkGetShaderBinaryDataEXT{}; +#endif +#if (defined(VK_EXT_shader_object)) + PFN_vkCmdBindShadersEXT fp_vkCmdBindShadersEXT = nullptr; +#else + void * fp_vkCmdBindShadersEXT{}; +#endif +#if (defined(VK_QNX_external_memory_screen_buffer)) + PFN_vkGetScreenBufferPropertiesQNX fp_vkGetScreenBufferPropertiesQNX = nullptr; +#else + void * fp_vkGetScreenBufferPropertiesQNX{}; +#endif +#if (defined(VK_AMDX_shader_enqueue)) + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX fp_vkGetExecutionGraphPipelineScratchSizeAMDX = nullptr; +#else + void * fp_vkGetExecutionGraphPipelineScratchSizeAMDX{}; +#endif +#if (defined(VK_AMDX_shader_enqueue)) + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX fp_vkGetExecutionGraphPipelineNodeIndexAMDX = nullptr; +#else + void * fp_vkGetExecutionGraphPipelineNodeIndexAMDX{}; +#endif +#if (defined(VK_AMDX_shader_enqueue)) + PFN_vkCreateExecutionGraphPipelinesAMDX fp_vkCreateExecutionGraphPipelinesAMDX = nullptr; +#else + void * fp_vkCreateExecutionGraphPipelinesAMDX{}; +#endif +#if (defined(VK_AMDX_shader_enqueue)) + PFN_vkCmdInitializeGraphScratchMemoryAMDX fp_vkCmdInitializeGraphScratchMemoryAMDX = nullptr; +#else + void * fp_vkCmdInitializeGraphScratchMemoryAMDX{}; +#endif +#if (defined(VK_AMDX_shader_enqueue)) + PFN_vkCmdDispatchGraphAMDX fp_vkCmdDispatchGraphAMDX = nullptr; +#else + void * fp_vkCmdDispatchGraphAMDX{}; +#endif +#if (defined(VK_AMDX_shader_enqueue)) + PFN_vkCmdDispatchGraphIndirectAMDX fp_vkCmdDispatchGraphIndirectAMDX = nullptr; +#else + void * fp_vkCmdDispatchGraphIndirectAMDX{}; +#endif +#if (defined(VK_AMDX_shader_enqueue)) + PFN_vkCmdDispatchGraphIndirectCountAMDX fp_vkCmdDispatchGraphIndirectCountAMDX = nullptr; +#else + void * fp_vkCmdDispatchGraphIndirectCountAMDX{}; +#endif +#if (defined(VK_KHR_maintenance6)) + PFN_vkCmdBindDescriptorSets2KHR fp_vkCmdBindDescriptorSets2KHR = nullptr; +#else + void * fp_vkCmdBindDescriptorSets2KHR{}; +#endif +#if (defined(VK_KHR_maintenance6)) + PFN_vkCmdPushConstants2KHR fp_vkCmdPushConstants2KHR = nullptr; +#else + void * fp_vkCmdPushConstants2KHR{}; +#endif +#if (defined(VK_KHR_maintenance6)) + PFN_vkCmdPushDescriptorSet2KHR fp_vkCmdPushDescriptorSet2KHR = nullptr; +#else + void * fp_vkCmdPushDescriptorSet2KHR{}; +#endif +#if (defined(VK_KHR_maintenance6)) + PFN_vkCmdPushDescriptorSetWithTemplate2KHR fp_vkCmdPushDescriptorSetWithTemplate2KHR = nullptr; +#else + void * fp_vkCmdPushDescriptorSetWithTemplate2KHR{}; +#endif +#if (defined(VK_KHR_maintenance6)) + PFN_vkCmdSetDescriptorBufferOffsets2EXT fp_vkCmdSetDescriptorBufferOffsets2EXT = nullptr; +#else + void * fp_vkCmdSetDescriptorBufferOffsets2EXT{}; +#endif +#if (defined(VK_KHR_maintenance6)) + PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT fp_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = nullptr; +#else + void * fp_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT{}; +#endif +#if (defined(VK_NV_low_latency2)) + PFN_vkSetLatencySleepModeNV fp_vkSetLatencySleepModeNV = nullptr; +#else + void * fp_vkSetLatencySleepModeNV{}; +#endif +#if (defined(VK_NV_low_latency2)) + PFN_vkLatencySleepNV fp_vkLatencySleepNV = nullptr; +#else + void * fp_vkLatencySleepNV{}; 
+#endif +#if (defined(VK_NV_low_latency2)) + PFN_vkSetLatencyMarkerNV fp_vkSetLatencyMarkerNV = nullptr; +#else + void * fp_vkSetLatencyMarkerNV{}; +#endif +#if ((defined(VK_NV_low_latency2))) && VK_HEADER_VERSION >= 271 + PFN_vkGetLatencyTimingsNV fp_vkGetLatencyTimingsNV = nullptr; +#else + void * fp_vkGetLatencyTimingsNV{}; +#endif +#if (defined(VK_NV_low_latency2)) + PFN_vkQueueNotifyOutOfBandNV fp_vkQueueNotifyOutOfBandNV = nullptr; +#else + void * fp_vkQueueNotifyOutOfBandNV{}; +#endif +#if (defined(VK_KHR_dynamic_rendering_local_read)) + PFN_vkCmdSetRenderingAttachmentLocationsKHR fp_vkCmdSetRenderingAttachmentLocationsKHR = nullptr; +#else + void * fp_vkCmdSetRenderingAttachmentLocationsKHR{}; +#endif +#if (defined(VK_KHR_dynamic_rendering_local_read)) + PFN_vkCmdSetRenderingInputAttachmentIndicesKHR fp_vkCmdSetRenderingInputAttachmentIndicesKHR = nullptr; +#else + void * fp_vkCmdSetRenderingInputAttachmentIndicesKHR{}; +#endif +#if (defined(VK_EXT_shader_object)) || (defined(VK_EXT_depth_clamp_control)) + PFN_vkCmdSetDepthClampRangeEXT fp_vkCmdSetDepthClampRangeEXT = nullptr; +#else + void * fp_vkCmdSetDepthClampRangeEXT{}; +#endif +#if (defined(VK_EXT_host_query_reset)) + PFN_vkResetQueryPoolEXT fp_vkResetQueryPoolEXT = nullptr; +#else + void * fp_vkResetQueryPoolEXT{}; +#endif +#if (defined(VK_KHR_maintenance1)) + PFN_vkTrimCommandPoolKHR fp_vkTrimCommandPoolKHR = nullptr; +#else + void * fp_vkTrimCommandPoolKHR{}; +#endif +#if (defined(VK_KHR_device_group)) + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR fp_vkGetDeviceGroupPeerMemoryFeaturesKHR = nullptr; +#else + void * fp_vkGetDeviceGroupPeerMemoryFeaturesKHR{}; +#endif +#if (defined(VK_KHR_bind_memory2)) + PFN_vkBindBufferMemory2KHR fp_vkBindBufferMemory2KHR = nullptr; +#else + void * fp_vkBindBufferMemory2KHR{}; +#endif +#if (defined(VK_KHR_bind_memory2)) + PFN_vkBindImageMemory2KHR fp_vkBindImageMemory2KHR = nullptr; +#else + void * fp_vkBindImageMemory2KHR{}; +#endif +#if (defined(VK_KHR_device_group)) + PFN_vkCmdSetDeviceMaskKHR fp_vkCmdSetDeviceMaskKHR = nullptr; +#else + void * fp_vkCmdSetDeviceMaskKHR{}; +#endif +#if (defined(VK_KHR_device_group)) + PFN_vkCmdDispatchBaseKHR fp_vkCmdDispatchBaseKHR = nullptr; +#else + void * fp_vkCmdDispatchBaseKHR{}; +#endif +#if (defined(VK_KHR_descriptor_update_template)) + PFN_vkCreateDescriptorUpdateTemplateKHR fp_vkCreateDescriptorUpdateTemplateKHR = nullptr; +#else + void * fp_vkCreateDescriptorUpdateTemplateKHR{}; +#endif +#if (defined(VK_KHR_descriptor_update_template)) + PFN_vkDestroyDescriptorUpdateTemplateKHR fp_vkDestroyDescriptorUpdateTemplateKHR = nullptr; +#else + void * fp_vkDestroyDescriptorUpdateTemplateKHR{}; +#endif +#if (defined(VK_KHR_descriptor_update_template)) + PFN_vkUpdateDescriptorSetWithTemplateKHR fp_vkUpdateDescriptorSetWithTemplateKHR = nullptr; +#else + void * fp_vkUpdateDescriptorSetWithTemplateKHR{}; +#endif +#if (defined(VK_KHR_get_memory_requirements2)) + PFN_vkGetBufferMemoryRequirements2KHR fp_vkGetBufferMemoryRequirements2KHR = nullptr; +#else + void * fp_vkGetBufferMemoryRequirements2KHR{}; +#endif +#if (defined(VK_KHR_get_memory_requirements2)) + PFN_vkGetImageMemoryRequirements2KHR fp_vkGetImageMemoryRequirements2KHR = nullptr; +#else + void * fp_vkGetImageMemoryRequirements2KHR{}; +#endif +#if (defined(VK_KHR_get_memory_requirements2)) + PFN_vkGetImageSparseMemoryRequirements2KHR fp_vkGetImageSparseMemoryRequirements2KHR = nullptr; +#else + void * fp_vkGetImageSparseMemoryRequirements2KHR{}; +#endif +#if (defined(VK_KHR_maintenance4)) + 
PFN_vkGetDeviceBufferMemoryRequirementsKHR fp_vkGetDeviceBufferMemoryRequirementsKHR = nullptr; +#else + void * fp_vkGetDeviceBufferMemoryRequirementsKHR{}; +#endif +#if (defined(VK_KHR_maintenance4)) + PFN_vkGetDeviceImageMemoryRequirementsKHR fp_vkGetDeviceImageMemoryRequirementsKHR = nullptr; +#else + void * fp_vkGetDeviceImageMemoryRequirementsKHR{}; +#endif +#if (defined(VK_KHR_maintenance4)) + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR fp_vkGetDeviceImageSparseMemoryRequirementsKHR = nullptr; +#else + void * fp_vkGetDeviceImageSparseMemoryRequirementsKHR{}; +#endif +#if (defined(VK_KHR_sampler_ycbcr_conversion)) + PFN_vkCreateSamplerYcbcrConversionKHR fp_vkCreateSamplerYcbcrConversionKHR = nullptr; +#else + void * fp_vkCreateSamplerYcbcrConversionKHR{}; +#endif +#if (defined(VK_KHR_sampler_ycbcr_conversion)) + PFN_vkDestroySamplerYcbcrConversionKHR fp_vkDestroySamplerYcbcrConversionKHR = nullptr; +#else + void * fp_vkDestroySamplerYcbcrConversionKHR{}; +#endif +#if (defined(VK_KHR_maintenance3)) + PFN_vkGetDescriptorSetLayoutSupportKHR fp_vkGetDescriptorSetLayoutSupportKHR = nullptr; +#else + void * fp_vkGetDescriptorSetLayoutSupportKHR{}; +#endif +#if (defined(VK_EXT_calibrated_timestamps)) + PFN_vkGetCalibratedTimestampsEXT fp_vkGetCalibratedTimestampsEXT = nullptr; +#else + void * fp_vkGetCalibratedTimestampsEXT{}; +#endif +#if (defined(VK_KHR_create_renderpass2)) + PFN_vkCreateRenderPass2KHR fp_vkCreateRenderPass2KHR = nullptr; +#else + void * fp_vkCreateRenderPass2KHR{}; +#endif +#if (defined(VK_KHR_create_renderpass2)) + PFN_vkCmdBeginRenderPass2KHR fp_vkCmdBeginRenderPass2KHR = nullptr; +#else + void * fp_vkCmdBeginRenderPass2KHR{}; +#endif +#if (defined(VK_KHR_create_renderpass2)) + PFN_vkCmdNextSubpass2KHR fp_vkCmdNextSubpass2KHR = nullptr; +#else + void * fp_vkCmdNextSubpass2KHR{}; +#endif +#if (defined(VK_KHR_create_renderpass2)) + PFN_vkCmdEndRenderPass2KHR fp_vkCmdEndRenderPass2KHR = nullptr; +#else + void * fp_vkCmdEndRenderPass2KHR{}; +#endif +#if (defined(VK_KHR_timeline_semaphore)) + PFN_vkGetSemaphoreCounterValueKHR fp_vkGetSemaphoreCounterValueKHR = nullptr; +#else + void * fp_vkGetSemaphoreCounterValueKHR{}; +#endif +#if (defined(VK_KHR_timeline_semaphore)) + PFN_vkWaitSemaphoresKHR fp_vkWaitSemaphoresKHR = nullptr; +#else + void * fp_vkWaitSemaphoresKHR{}; +#endif +#if (defined(VK_KHR_timeline_semaphore)) + PFN_vkSignalSemaphoreKHR fp_vkSignalSemaphoreKHR = nullptr; +#else + void * fp_vkSignalSemaphoreKHR{}; +#endif +#if (defined(VK_AMD_draw_indirect_count)) + PFN_vkCmdDrawIndirectCountAMD fp_vkCmdDrawIndirectCountAMD = nullptr; +#else + void * fp_vkCmdDrawIndirectCountAMD{}; +#endif +#if (defined(VK_AMD_draw_indirect_count)) + PFN_vkCmdDrawIndexedIndirectCountAMD fp_vkCmdDrawIndexedIndirectCountAMD = nullptr; +#else + void * fp_vkCmdDrawIndexedIndirectCountAMD{}; +#endif +#if (defined(VK_NV_ray_tracing)) + PFN_vkGetRayTracingShaderGroupHandlesNV fp_vkGetRayTracingShaderGroupHandlesNV = nullptr; +#else + void * fp_vkGetRayTracingShaderGroupHandlesNV{}; +#endif +#if (defined(VK_KHR_buffer_device_address)) + PFN_vkGetBufferOpaqueCaptureAddressKHR fp_vkGetBufferOpaqueCaptureAddressKHR = nullptr; +#else + void * fp_vkGetBufferOpaqueCaptureAddressKHR{}; +#endif +#if (defined(VK_EXT_buffer_device_address)) + PFN_vkGetBufferDeviceAddressEXT fp_vkGetBufferDeviceAddressEXT = nullptr; +#else + void * fp_vkGetBufferDeviceAddressEXT{}; +#endif +#if (defined(VK_KHR_buffer_device_address)) + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR 
fp_vkGetDeviceMemoryOpaqueCaptureAddressKHR = nullptr; +#else + void * fp_vkGetDeviceMemoryOpaqueCaptureAddressKHR{}; +#endif +#if (defined(VK_EXT_line_rasterization)) + PFN_vkCmdSetLineStippleEXT fp_vkCmdSetLineStippleEXT = nullptr; +#else + void * fp_vkCmdSetLineStippleEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetCullModeEXT fp_vkCmdSetCullModeEXT = nullptr; +#else + void * fp_vkCmdSetCullModeEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetFrontFaceEXT fp_vkCmdSetFrontFaceEXT = nullptr; +#else + void * fp_vkCmdSetFrontFaceEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetPrimitiveTopologyEXT fp_vkCmdSetPrimitiveTopologyEXT = nullptr; +#else + void * fp_vkCmdSetPrimitiveTopologyEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetViewportWithCountEXT fp_vkCmdSetViewportWithCountEXT = nullptr; +#else + void * fp_vkCmdSetViewportWithCountEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetScissorWithCountEXT fp_vkCmdSetScissorWithCountEXT = nullptr; +#else + void * fp_vkCmdSetScissorWithCountEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdBindVertexBuffers2EXT fp_vkCmdBindVertexBuffers2EXT = nullptr; +#else + void * fp_vkCmdBindVertexBuffers2EXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetDepthTestEnableEXT fp_vkCmdSetDepthTestEnableEXT = nullptr; +#else + void * fp_vkCmdSetDepthTestEnableEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetDepthWriteEnableEXT fp_vkCmdSetDepthWriteEnableEXT = nullptr; +#else + void * fp_vkCmdSetDepthWriteEnableEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetDepthCompareOpEXT fp_vkCmdSetDepthCompareOpEXT = nullptr; +#else + void * fp_vkCmdSetDepthCompareOpEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetDepthBoundsTestEnableEXT fp_vkCmdSetDepthBoundsTestEnableEXT = nullptr; +#else + void * fp_vkCmdSetDepthBoundsTestEnableEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetStencilTestEnableEXT fp_vkCmdSetStencilTestEnableEXT = nullptr; +#else + void * fp_vkCmdSetStencilTestEnableEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetStencilOpEXT fp_vkCmdSetStencilOpEXT = nullptr; +#else + void * fp_vkCmdSetStencilOpEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetRasterizerDiscardEnableEXT fp_vkCmdSetRasterizerDiscardEnableEXT = nullptr; +#else + void * fp_vkCmdSetRasterizerDiscardEnableEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetDepthBiasEnableEXT fp_vkCmdSetDepthBiasEnableEXT = nullptr; +#else + void * fp_vkCmdSetDepthBiasEnableEXT{}; +#endif +#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) + PFN_vkCmdSetPrimitiveRestartEnableEXT fp_vkCmdSetPrimitiveRestartEnableEXT = nullptr; +#else + void * fp_vkCmdSetPrimitiveRestartEnableEXT{}; +#endif +#if (defined(VK_EXT_private_data)) + 
PFN_vkCreatePrivateDataSlotEXT fp_vkCreatePrivateDataSlotEXT = nullptr; +#else + void * fp_vkCreatePrivateDataSlotEXT{}; +#endif +#if (defined(VK_EXT_private_data)) + PFN_vkDestroyPrivateDataSlotEXT fp_vkDestroyPrivateDataSlotEXT = nullptr; +#else + void * fp_vkDestroyPrivateDataSlotEXT{}; +#endif +#if (defined(VK_EXT_private_data)) + PFN_vkSetPrivateDataEXT fp_vkSetPrivateDataEXT = nullptr; +#else + void * fp_vkSetPrivateDataEXT{}; +#endif +#if (defined(VK_EXT_private_data)) + PFN_vkGetPrivateDataEXT fp_vkGetPrivateDataEXT = nullptr; +#else + void * fp_vkGetPrivateDataEXT{}; +#endif +#if (defined(VK_KHR_copy_commands2)) + PFN_vkCmdCopyBuffer2KHR fp_vkCmdCopyBuffer2KHR = nullptr; +#else + void * fp_vkCmdCopyBuffer2KHR{}; +#endif +#if (defined(VK_KHR_copy_commands2)) + PFN_vkCmdCopyImage2KHR fp_vkCmdCopyImage2KHR = nullptr; +#else + void * fp_vkCmdCopyImage2KHR{}; +#endif +#if (defined(VK_KHR_copy_commands2)) + PFN_vkCmdBlitImage2KHR fp_vkCmdBlitImage2KHR = nullptr; +#else + void * fp_vkCmdBlitImage2KHR{}; +#endif +#if (defined(VK_KHR_copy_commands2)) + PFN_vkCmdCopyBufferToImage2KHR fp_vkCmdCopyBufferToImage2KHR = nullptr; +#else + void * fp_vkCmdCopyBufferToImage2KHR{}; +#endif +#if (defined(VK_KHR_copy_commands2)) + PFN_vkCmdCopyImageToBuffer2KHR fp_vkCmdCopyImageToBuffer2KHR = nullptr; +#else + void * fp_vkCmdCopyImageToBuffer2KHR{}; +#endif +#if (defined(VK_KHR_copy_commands2)) + PFN_vkCmdResolveImage2KHR fp_vkCmdResolveImage2KHR = nullptr; +#else + void * fp_vkCmdResolveImage2KHR{}; +#endif +#if (defined(VK_KHR_synchronization2)) + PFN_vkCmdSetEvent2KHR fp_vkCmdSetEvent2KHR = nullptr; +#else + void * fp_vkCmdSetEvent2KHR{}; +#endif +#if (defined(VK_KHR_synchronization2)) + PFN_vkCmdResetEvent2KHR fp_vkCmdResetEvent2KHR = nullptr; +#else + void * fp_vkCmdResetEvent2KHR{}; +#endif +#if (defined(VK_KHR_synchronization2)) + PFN_vkCmdWaitEvents2KHR fp_vkCmdWaitEvents2KHR = nullptr; +#else + void * fp_vkCmdWaitEvents2KHR{}; +#endif +#if (defined(VK_KHR_synchronization2)) + PFN_vkCmdPipelineBarrier2KHR fp_vkCmdPipelineBarrier2KHR = nullptr; +#else + void * fp_vkCmdPipelineBarrier2KHR{}; +#endif +#if (defined(VK_KHR_synchronization2)) + PFN_vkQueueSubmit2KHR fp_vkQueueSubmit2KHR = nullptr; +#else + void * fp_vkQueueSubmit2KHR{}; +#endif +#if (defined(VK_KHR_synchronization2)) + PFN_vkCmdWriteTimestamp2KHR fp_vkCmdWriteTimestamp2KHR = nullptr; +#else + void * fp_vkCmdWriteTimestamp2KHR{}; +#endif +#if (defined(VK_KHR_dynamic_rendering)) + PFN_vkCmdBeginRenderingKHR fp_vkCmdBeginRenderingKHR = nullptr; +#else + void * fp_vkCmdBeginRenderingKHR{}; +#endif +#if (defined(VK_KHR_dynamic_rendering)) + PFN_vkCmdEndRenderingKHR fp_vkCmdEndRenderingKHR = nullptr; +#else + void * fp_vkCmdEndRenderingKHR{}; +#endif +#if (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) + PFN_vkGetImageSubresourceLayout2EXT fp_vkGetImageSubresourceLayout2EXT = nullptr; +#else + void * fp_vkGetImageSubresourceLayout2EXT{}; +#endif + bool is_populated() const { return populated; } + VkDevice device = VK_NULL_HANDLE; + private: + bool populated = false; +}; + +GFXRECON_END_NAMESPACE(gfxrecon) + +GFXRECON_END_NAMESPACE(test) + +#endif // GFXRECON_TEST_APP_DISPATCH_H diff --git a/test/test_apps/triangle/CMakeLists.txt b/test/test_apps/triangle/CMakeLists.txt new file mode 100644 index 0000000000..69ac79b2b1 --- /dev/null +++ b/test/test_apps/triangle/CMakeLists.txt @@ -0,0 +1,70 @@ +############################################################################### +# Copyright (c) 2018-2020 LunarG, 
Inc. +# Copyright (c) 2020-2023 Advanced Micro Devices, Inc. +# All rights reserved +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# Author: LunarG Team +# Author: AMD Developer Tools Team +# Description: CMake script for triangle test app +############################################################################### + +add_executable(gfxrecon-testapp-triangle "") + +target_sources(gfxrecon-testapp-triangle + PRIVATE + ${CMAKE_CURRENT_LIST_DIR}/triangle.cpp + ${CMAKE_CURRENT_LIST_DIR}/../common/test_app_base.cpp + ) + +target_include_directories(gfxrecon-testapp-triangle PUBLIC + ${CMAKE_BINARY_DIR} + ${CMAKE_CURRENT_LIST_DIR}/../common) + +target_link_libraries(gfxrecon-testapp-triangle + gfxrecon_application + gfxrecon_decode + gfxrecon_graphics + gfxrecon_format + gfxrecon_util + SDL3::SDL3 + platform_specific) + +if (MSVC) + # Force inclusion of "gfxrecon_disable_popup_result" variable in linking. + # On 32-bit windows, MSVC prefixes symbols with "_" but on 64-bit windows it doesn't. + if(CMAKE_SIZEOF_VOID_P EQUAL 4) + target_link_options(gfxrecon-replay PUBLIC "LINKER:/Include:_gfxrecon_disable_popup_result") + else() + target_link_options(gfxrecon-replay PUBLIC "LINKER:/Include:gfxrecon_disable_popup_result") + endif() +endif() + +common_build_directives(gfxrecon-testapp-triangle) + +add_custom_command( + TARGET gfxrecon-testapp-triangle + POST_BUILD + COMMAND + ${CMAKE_COMMAND} -E copy_directory + ${CMAKE_CURRENT_LIST_DIR}/shaders "$" + DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_BUILD_TYPE}) + +install(TARGETS gfxrecon-testapp-triangle RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}) diff --git a/test/test_apps/triangle/shaders/frag.spv b/test/test_apps/triangle/shaders/frag.spv new file mode 100644 index 0000000000000000000000000000000000000000..1331eb4cda17af9c3384bd5c049b00369d68a009 GIT binary patch literal 608 zcmYk2-Acni5QWF4X=7{uEa;t7ycCKTDuPr|k=z98&jTzmSv3-qkfefleKw!U8^Lqd zM6xiMowMKWoQc&s=!$HJBLkVr-i9y>RhEyZ#pOoGCeZxa_M@v zD#$Wbl%KIyag=BkMmebHLz8nFT$BXyDr^Eah^9ANY~wFol{aae=CB0CHh!OT|D_gP z+~OR21h80hrn;VD8qTwCCKI#Y!M+1+hS&wqxmmOS3||MCe~$WjRkH^*@;zkotjYJ0 z@w<#po;5vCJ48)(h!9y{o?-_2VJ|Me@eRdBUO?sB0|f8iL*?C3r6K)m<4R+XnwwM_ b-@k)<`rk(IK6#$g#|64_9-jTcrd2B5XJ94y1WEL1biSCP<((ZDi9N+2EkO5O+ZNa+SJ{OHMP5Gi%~!MpPBeq z`NhQZYkN0)H}b+^)X{=X537hJyWhZQx{{*lxY>->zsDlS>9{!>~5o& zF^xiqW>%G~rtE7FCB$p84cQY}L%+KGABmf0!gw8j|KyFIg@Ygs^3cyd2fZ-$`yaoA z-8@4xiN-rR@;Yz*?qJ|&`HwjC<0uLJUX)4|eGAczkLaSL8(;KB1&$P>JvqiPm(<6g zpGA4pPqdYordg8j_Tzpk!qL30@OkVXhv`|E=HYiiu_aGTuKI-7u|m%|6nvEAr_oP! 
zM@^YycI6J@XmA*1d60C&VUANIcM-?KoTKyN{8;qEIZ9JD^FHa(5wmc%pVHv`fZi3q z6X(jarb;*D=noEWb@T;C50)c_%DTzPb!82TXhu)^%WB4f=F?fe1mvVQA^&H zEva6Qtia?Jgn2ctjI~+e3B}Ye3s1^Z1Lq2}H{XMQQDN>Aj(YsGjAijm34d?tZ1~o! z3=O>1Tvn|0+G^$m;4sTwQA|zAwTd3*h4~(O^2~Tuo<8gh3yR_F42xABo*BTrztI`4 z2^;fE{j`eO>q2n!jamLig>BCAH%GMO;Ms3k#ecfg(2WZc1RdGc^8 zid=bHLk{C^R^^%5l`$81O$NtZugkl#nv8wGcVsN;@5+;dryjgfapdnSrVs8A+?MCt z^a9?lFlS9YnD>c>{4;s#$?xec@CKQgh32mqNB)%%Z;HDm=59E*mr~Q)JsExNOHEG? zWa#11&>#N~_z%*s=Vryp4Y{p~W9DO3F~=hrT5{YocgH@=`dG$!a%S`eMsNFis@V3$ YtUEG#heyNQyE1s{IUDdlo%(axfA`;bD*ylh literal 0 HcmV?d00001 diff --git a/test/test_apps/triangle/triangle.cpp b/test/test_apps/triangle/triangle.cpp new file mode 100644 index 0000000000..c011928849 --- /dev/null +++ b/test/test_apps/triangle/triangle.cpp @@ -0,0 +1,660 @@ +/* +** Copyright (c) 2018-2023 Valve Corporation +** Copyright (c) 2018-2024 LunarG, Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and associated documentation files (the "Software"), +** to deal in the Software without restriction, including without limitation +** the rights to use, copy, modify, merge, publish, distribute, sublicense, +** and/or sell copies of the Software, and to permit persons to whom the +** Software is furnished to do so, subject to the following conditions: +** +** The above copyright notice and this permission notice shall be included in +** all copies or substantial portions of the Software. +** +** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +** IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +** AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +** FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +** DEALINGS IN THE SOFTWARE. 
+*/
+
+#include
+
+#include
+#include
+#include
+#include
+
+#include
+
+#include
+#include
+#include
+
+#include
+
+GFXRECON_BEGIN_NAMESPACE(gfxrecon)
+
+GFXRECON_BEGIN_NAMESPACE(test_app)
+
+GFXRECON_BEGIN_NAMESPACE(triangle)
+
+const int MAX_FRAMES_IN_FLIGHT = 2;
+
+struct Init {
+    SDL_Window* window;
+    gfxrecon::test::Instance instance;
+    gfxrecon::test::InstanceDispatchTable inst_disp;
+    VkSurfaceKHR surface;
+    gfxrecon::test::Device device;
+    gfxrecon::test::DispatchTable disp;
+    gfxrecon::test::Swapchain swapchain;
+};
+
+struct RenderData {
+    VkQueue graphics_queue;
+    VkQueue present_queue;
+
+    std::vector<VkImage> swapchain_images;
+    std::vector<VkImageView> swapchain_image_views;
+    std::vector<VkFramebuffer> framebuffers;
+
+    VkRenderPass render_pass;
+    VkPipelineLayout pipeline_layout;
+    VkPipeline graphics_pipeline;
+
+    VkCommandPool command_pool;
+    std::vector<VkCommandBuffer> command_buffers;
+
+    std::vector<VkSemaphore> available_semaphores;
+    std::vector<VkSemaphore> finished_semaphore;
+    std::vector<VkFence> in_flight_fences;
+    std::vector<VkFence> image_in_flight;
+    size_t current_frame = 0;
+};
+
+SDL_Window* create_window_sdl(const char* window_name = "", bool resize = true) {
+    if (!SDL_Init(SDL_INIT_VIDEO)) {
+        std::cout << SDL_GetError() << std::endl;
+        return nullptr;
+    }
+
+    SDL_WindowFlags flags = 0;
+    flags |= SDL_WINDOW_VULKAN;
+    if (resize) flags |= SDL_WINDOW_RESIZABLE;
+
+    auto window = SDL_CreateWindow(window_name, 1024, 1024, flags);
+    if (window == nullptr) {
+        std::cout << SDL_GetError() << std::endl;
+        return nullptr;
+    }
+    return window;
+}
+
+void destroy_window_sdl(SDL_Window * window) {
+    SDL_DestroyWindow(window);
+    SDL_Quit();
+}
+
+VkSurfaceKHR create_surface_sdl(VkInstance instance, SDL_Window * window, VkAllocationCallbacks* allocator = nullptr) {
+    VkSurfaceKHR surface = VK_NULL_HANDLE;
+    if (!SDL_Vulkan_CreateSurface(window, instance, allocator, &surface)) {
+        auto error = SDL_GetError();
+        std::cout << error << std::endl;
+        surface = VK_NULL_HANDLE;
+    }
+    return surface;
+}
+
+int device_initialization(Init& init) {
+    init.window = create_window_sdl("Vulkan Triangle", true);
+    if (init.window == nullptr) return -1;
+
+    gfxrecon::test::InstanceBuilder instance_builder;
+    auto instance_ret = instance_builder.use_default_debug_messenger().request_validation_layers().build();
+    if (!instance_ret) {
+        std::cout << instance_ret.error().message() << "\n";
+        return -1;
+    }
+    init.instance = instance_ret.value();
+
+    init.inst_disp = init.instance.make_table();
+
+    init.surface = create_surface_sdl(init.instance, init.window);
+    if (init.surface == nullptr) return -1;
+
+    gfxrecon::test::PhysicalDeviceSelector phys_device_selector(init.instance);
+    auto phys_device_ret = phys_device_selector.set_surface(init.surface).select();
+    if (!phys_device_ret) {
+        std::cout << phys_device_ret.error().message() << "\n";
+        return -1;
+    }
+    gfxrecon::test::PhysicalDevice physical_device = phys_device_ret.value();
+
+    gfxrecon::test::DeviceBuilder device_builder{ physical_device };
+    auto device_ret = device_builder.build();
+    if (!device_ret) {
+        std::cout << device_ret.error().message() << "\n";
+        return -1;
+    }
+    init.device = device_ret.value();
+
+    init.disp = init.device.make_table();
+
+    return 0;
+}
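+
+// Note on the flow above: device_initialization() builds everything the rest of the sample needs in
+// order (window -> instance -> surface -> physical device selection -> logical device -> dispatch
+// table). Each builder reports failure through a Result-style return value checked with error() and
+// value() rather than throwing, so every step bails out early with "return -1" on failure.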
+
+int create_swapchain(Init& init) {
+
+    gfxrecon::test::SwapchainBuilder swapchain_builder{ init.device };
+    auto swap_ret = swapchain_builder.set_old_swapchain(init.swapchain).build();
+    if (!swap_ret) {
+        std::cout << swap_ret.error().message() << " " << swap_ret.vk_result() << "\n";
+        return -1;
+    }
+    gfxrecon::test::destroy_swapchain(init.swapchain);
+    init.swapchain = swap_ret.value();
+    return 0;
+}
+
+int get_queues(Init& init, RenderData& data) {
+    auto gq = init.device.get_queue(gfxrecon::test::QueueType::graphics);
+    if (!gq.has_value()) {
+        std::cout << "failed to get graphics queue: " << gq.error().message() << "\n";
+        return -1;
+    }
+    data.graphics_queue = gq.value();
+
+    auto pq = init.device.get_queue(gfxrecon::test::QueueType::present);
+    if (!pq.has_value()) {
+        std::cout << "failed to get present queue: " << pq.error().message() << "\n";
+        return -1;
+    }
+    data.present_queue = pq.value();
+    return 0;
+}
+
+int create_render_pass(Init& init, RenderData& data) {
+    VkAttachmentDescription color_attachment = {};
+    color_attachment.format = init.swapchain.image_format;
+    color_attachment.samples = VK_SAMPLE_COUNT_1_BIT;
+    color_attachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+    color_attachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
+    color_attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+    color_attachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
+    color_attachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    color_attachment.finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
+
+    VkAttachmentReference color_attachment_ref = {};
+    color_attachment_ref.attachment = 0;
+    color_attachment_ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+
+    VkSubpassDescription subpass = {};
+    subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+    subpass.colorAttachmentCount = 1;
+    subpass.pColorAttachments = &color_attachment_ref;
+
+    VkSubpassDependency dependency = {};
+    dependency.srcSubpass = VK_SUBPASS_EXTERNAL;
+    dependency.dstSubpass = 0;
+    dependency.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
+    dependency.srcAccessMask = 0;
+    dependency.dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
+    dependency.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+
+    VkRenderPassCreateInfo render_pass_info = {};
+    render_pass_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+    render_pass_info.attachmentCount = 1;
+    render_pass_info.pAttachments = &color_attachment;
+    render_pass_info.subpassCount = 1;
+    render_pass_info.pSubpasses = &subpass;
+    render_pass_info.dependencyCount = 1;
+    render_pass_info.pDependencies = &dependency;
+
+    if (init.disp.createRenderPass(&render_pass_info, nullptr, &data.render_pass) != VK_SUCCESS) {
+        std::cout << "failed to create render pass\n";
+        return -1; // failed to create render pass!
+    }
+    return 0;
+}
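+
+// Note on the render pass above: the single VK_SUBPASS_EXTERNAL -> subpass 0 dependency holds back
+// color attachment writes until the swapchain image is actually available. It pairs with draw_frame()
+// further down, which waits on the image-available semaphore at
+// VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT before the recorded commands write to the image.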
+
+std::vector<char> readFile(const std::string& filename) {
+    std::ifstream file(filename, std::ios::ate | std::ios::binary);
+
+    if (!file.is_open()) {
+        throw std::runtime_error("failed to open file!");
+    }
+
+    size_t file_size = (size_t)file.tellg();
+    std::vector<char> buffer(file_size);
+
+    file.seekg(0);
+    file.read(buffer.data(), static_cast<std::streamsize>(file_size));
+
+    file.close();
+
+    return buffer;
+}
+
+VkShaderModule createShaderModule(Init& init, const std::vector<char>& code) {
+    VkShaderModuleCreateInfo create_info = {};
+    create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
+    create_info.codeSize = code.size();
+    create_info.pCode = reinterpret_cast<const uint32_t*>(code.data());
+
+    VkShaderModule shaderModule;
+    if (init.disp.createShaderModule(&create_info, nullptr, &shaderModule) != VK_SUCCESS) {
+        return VK_NULL_HANDLE; // failed to create shader module
+    }
+
+    return shaderModule;
+}
+
+int create_graphics_pipeline(Init& init, RenderData& data) {
+    auto vert_code = readFile("vert.spv");
+    auto frag_code = readFile("frag.spv");
+
+    VkShaderModule vert_module = createShaderModule(init, vert_code);
+    VkShaderModule frag_module = createShaderModule(init, frag_code);
+    if (vert_module == VK_NULL_HANDLE || frag_module == VK_NULL_HANDLE) {
+        std::cout << "failed to create shader module\n";
+        return -1; // failed to create shader modules
+    }
+
+    VkPipelineShaderStageCreateInfo vert_stage_info = {};
+    vert_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+    vert_stage_info.stage = VK_SHADER_STAGE_VERTEX_BIT;
+    vert_stage_info.module = vert_module;
+    vert_stage_info.pName = "main";
+
+    VkPipelineShaderStageCreateInfo frag_stage_info = {};
+    frag_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+    frag_stage_info.stage = VK_SHADER_STAGE_FRAGMENT_BIT;
+    frag_stage_info.module = frag_module;
+    frag_stage_info.pName = "main";
+
+    VkPipelineShaderStageCreateInfo shader_stages[] = { vert_stage_info, frag_stage_info };
+
+    VkPipelineVertexInputStateCreateInfo vertex_input_info = {};
+    vertex_input_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+    vertex_input_info.vertexBindingDescriptionCount = 0;
+    vertex_input_info.vertexAttributeDescriptionCount = 0;
+
+    VkPipelineInputAssemblyStateCreateInfo input_assembly = {};
+    input_assembly.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+    input_assembly.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
+    input_assembly.primitiveRestartEnable = VK_FALSE;
+
+    VkViewport viewport = {};
+    viewport.x = 0.0f;
+    viewport.y = 0.0f;
+    viewport.width = (float)init.swapchain.extent.width;
+    viewport.height = (float)init.swapchain.extent.height;
+    viewport.minDepth = 0.0f;
+    viewport.maxDepth = 1.0f;
+
+    VkRect2D scissor = {};
+    scissor.offset = { 0, 0 };
+    scissor.extent = init.swapchain.extent;
+
+    VkPipelineViewportStateCreateInfo viewport_state = {};
+    viewport_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
+    viewport_state.viewportCount = 1;
+    viewport_state.pViewports = &viewport;
+    viewport_state.scissorCount = 1;
+    viewport_state.pScissors = &scissor;
+
+    VkPipelineRasterizationStateCreateInfo rasterizer = {};
+    rasterizer.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+    rasterizer.depthClampEnable = VK_FALSE;
+    rasterizer.rasterizerDiscardEnable = VK_FALSE;
+    rasterizer.polygonMode = VK_POLYGON_MODE_FILL;
+    rasterizer.lineWidth = 1.0f;
+    rasterizer.cullMode = VK_CULL_MODE_BACK_BIT;
+    rasterizer.frontFace = VK_FRONT_FACE_CLOCKWISE;
+    rasterizer.depthBiasEnable = VK_FALSE;
+
+    VkPipelineMultisampleStateCreateInfo multisampling = {};
+    multisampling.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+    multisampling.sampleShadingEnable = VK_FALSE;
+    multisampling.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
+
+    VkPipelineColorBlendAttachmentState colorBlendAttachment = {};
+    colorBlendAttachment.colorWriteMask =
+        VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT;
+    colorBlendAttachment.blendEnable = VK_FALSE;
+
+    VkPipelineColorBlendStateCreateInfo color_blending = {};
+    color_blending.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
+    color_blending.logicOpEnable = VK_FALSE;
+    color_blending.logicOp = VK_LOGIC_OP_COPY;
+    color_blending.attachmentCount = 1;
+    color_blending.pAttachments = &colorBlendAttachment;
+    color_blending.blendConstants[0] = 0.0f;
+    color_blending.blendConstants[1] = 0.0f;
+    color_blending.blendConstants[2] = 0.0f;
+    color_blending.blendConstants[3] = 0.0f;
+
+    VkPipelineLayoutCreateInfo pipeline_layout_info = {};
+    pipeline_layout_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+    pipeline_layout_info.setLayoutCount = 0;
+    pipeline_layout_info.pushConstantRangeCount = 0;
+
+    if (init.disp.createPipelineLayout(&pipeline_layout_info, nullptr, &data.pipeline_layout) != VK_SUCCESS) {
+        std::cout << "failed to create pipeline layout\n";
+        return -1; // failed to create pipeline layout
+    }
+
+    std::vector<VkDynamicState> dynamic_states = { VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR };
+
+    VkPipelineDynamicStateCreateInfo dynamic_info = {};
+    dynamic_info.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+    dynamic_info.dynamicStateCount = static_cast<uint32_t>(dynamic_states.size());
+    dynamic_info.pDynamicStates = dynamic_states.data();
+
+    VkGraphicsPipelineCreateInfo pipeline_info = {};
+    pipeline_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
+    pipeline_info.stageCount = 2;
+    pipeline_info.pStages = shader_stages;
+    pipeline_info.pVertexInputState = &vertex_input_info;
+    pipeline_info.pInputAssemblyState = &input_assembly;
+    pipeline_info.pViewportState = &viewport_state;
+    pipeline_info.pRasterizationState = &rasterizer;
+    pipeline_info.pMultisampleState = &multisampling;
+    pipeline_info.pColorBlendState = &color_blending;
+    pipeline_info.pDynamicState = &dynamic_info;
+    pipeline_info.layout = data.pipeline_layout;
+    pipeline_info.renderPass = data.render_pass;
+    pipeline_info.subpass = 0;
+    pipeline_info.basePipelineHandle = VK_NULL_HANDLE;
+
+    if (init.disp.createGraphicsPipelines(VK_NULL_HANDLE, 1, &pipeline_info, nullptr, &data.graphics_pipeline) != VK_SUCCESS) {
+        std::cout << "failed to create pipeline\n";
+        return -1; // failed to create graphics pipeline
+    }
+
+    init.disp.destroyShaderModule(frag_module, nullptr);
+    init.disp.destroyShaderModule(vert_module, nullptr);
+    return 0;
+}
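+
+// The pipeline above declares viewport and scissor as dynamic state, so the VkViewport/VkRect2D
+// values baked into VkPipelineViewportStateCreateInfo are only placeholders; the effective values are
+// recorded per command buffer with cmdSetViewport()/cmdSetScissor() in create_command_buffers() below.
+// This is also why recreate_swapchain() can keep the same VkPipeline after a resize instead of
+// rebuilding it.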
+
+int create_framebuffers(Init& init, RenderData& data) {
+    data.swapchain_images = init.swapchain.get_images().value();
+    data.swapchain_image_views = init.swapchain.get_image_views().value();
+
+    data.framebuffers.resize(data.swapchain_image_views.size());
+
+    for (size_t i = 0; i < data.swapchain_image_views.size(); i++) {
+        VkImageView attachments[] = { data.swapchain_image_views[i] };
+
+        VkFramebufferCreateInfo framebuffer_info = {};
+        framebuffer_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
+        framebuffer_info.renderPass = data.render_pass;
+        framebuffer_info.attachmentCount = 1;
+        framebuffer_info.pAttachments = attachments;
+        framebuffer_info.width = init.swapchain.extent.width;
+        framebuffer_info.height = init.swapchain.extent.height;
+        framebuffer_info.layers = 1;
+
+        if (init.disp.createFramebuffer(&framebuffer_info, nullptr, &data.framebuffers[i]) != VK_SUCCESS) {
+            return -1; // failed to create framebuffer
+        }
+    }
+    return 0;
+}
+
+int create_command_pool(Init& init, RenderData& data) {
+    VkCommandPoolCreateInfo pool_info = {};
+    pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    pool_info.queueFamilyIndex = init.device.get_queue_index(gfxrecon::test::QueueType::graphics).value();
+
+    if (init.disp.createCommandPool(&pool_info, nullptr, &data.command_pool) != VK_SUCCESS) {
+        std::cout << "failed to create command pool\n";
+        return -1; // failed to create command pool
+    }
+    return 0;
+}
+
+int create_command_buffers(Init& init, RenderData& data) {
+    data.command_buffers.resize(data.framebuffers.size());
+
+    VkCommandBufferAllocateInfo allocInfo = {};
+    allocInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    allocInfo.commandPool = data.command_pool;
+    allocInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    allocInfo.commandBufferCount = (uint32_t)data.command_buffers.size();
+
+    if (init.disp.allocateCommandBuffers(&allocInfo, data.command_buffers.data()) != VK_SUCCESS) {
+        return -1; // failed to allocate command buffers;
+    }
+
+    for (size_t i = 0; i < data.command_buffers.size(); i++) {
+        VkCommandBufferBeginInfo begin_info = {};
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+
+        if (init.disp.beginCommandBuffer(data.command_buffers[i], &begin_info) != VK_SUCCESS) {
+            return -1; // failed to begin recording command buffer
+        }
+
+        VkRenderPassBeginInfo render_pass_info = {};
+        render_pass_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
+        render_pass_info.renderPass = data.render_pass;
+        render_pass_info.framebuffer = data.framebuffers[i];
+        render_pass_info.renderArea.offset = { 0, 0 };
+        render_pass_info.renderArea.extent = init.swapchain.extent;
+        VkClearValue clearColor{ { { 0.0f, 0.0f, 0.0f, 1.0f } } };
+        render_pass_info.clearValueCount = 1;
+        render_pass_info.pClearValues = &clearColor;
+
+        VkViewport viewport = {};
+        viewport.x = 0.0f;
+        viewport.y = 0.0f;
+        viewport.width = (float)init.swapchain.extent.width;
+        viewport.height = (float)init.swapchain.extent.height;
+        viewport.minDepth = 0.0f;
+        viewport.maxDepth = 1.0f;
+
+        VkRect2D scissor = {};
+        scissor.offset = { 0, 0 };
+        scissor.extent = init.swapchain.extent;
+
+        init.disp.cmdSetViewport(data.command_buffers[i], 0, 1, &viewport);
+        init.disp.cmdSetScissor(data.command_buffers[i], 0, 1, &scissor);
+
+        init.disp.cmdBeginRenderPass(data.command_buffers[i], &render_pass_info, VK_SUBPASS_CONTENTS_INLINE);
+
+        init.disp.cmdBindPipeline(data.command_buffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, data.graphics_pipeline);
+
+        init.disp.cmdDraw(data.command_buffers[i], 3, 1, 0, 0);
+
+        init.disp.cmdEndRenderPass(data.command_buffers[i]);
+
+        if (init.disp.endCommandBuffer(data.command_buffers[i]) != VK_SUCCESS) {
+            std::cout << "failed to record command buffer\n";
+            return -1; // failed to record command buffer!
+        }
+    }
+    return 0;
+}
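+
+// The synchronization setup below is the usual frames-in-flight scheme: one image-available
+// semaphore, one render-finished semaphore and one fence per frame slot (MAX_FRAMES_IN_FLIGHT = 2),
+// plus one image_in_flight entry per swapchain image so draw_frame() never starts rendering to an
+// image that an earlier frame still has in flight.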
+
+int create_sync_objects(Init& init, RenderData& data) {
+    data.available_semaphores.resize(MAX_FRAMES_IN_FLIGHT);
+    data.finished_semaphore.resize(MAX_FRAMES_IN_FLIGHT);
+    data.in_flight_fences.resize(MAX_FRAMES_IN_FLIGHT);
+    data.image_in_flight.resize(init.swapchain.image_count, VK_NULL_HANDLE);
+
+    VkSemaphoreCreateInfo semaphore_info = {};
+    semaphore_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+
+    VkFenceCreateInfo fence_info = {};
+    fence_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+    fence_info.flags = VK_FENCE_CREATE_SIGNALED_BIT;
+
+    for (size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) {
+        if (init.disp.createSemaphore(&semaphore_info, nullptr, &data.available_semaphores[i]) != VK_SUCCESS ||
+            init.disp.createSemaphore(&semaphore_info, nullptr, &data.finished_semaphore[i]) != VK_SUCCESS ||
+            init.disp.createFence(&fence_info, nullptr, &data.in_flight_fences[i]) != VK_SUCCESS) {
+            std::cout << "failed to create sync objects\n";
+            return -1; // failed to create synchronization objects for a frame
+        }
+    }
+    return 0;
+}
+
+int recreate_swapchain(Init& init, RenderData& data) {
+    init.disp.deviceWaitIdle();
+
+    init.disp.destroyCommandPool(data.command_pool, nullptr);
+
+    for (auto framebuffer : data.framebuffers) {
+        init.disp.destroyFramebuffer(framebuffer, nullptr);
+    }
+
+    init.swapchain.destroy_image_views(data.swapchain_image_views);
+
+    if (0 != create_swapchain(init)) return -1;
+    if (0 != create_framebuffers(init, data)) return -1;
+    if (0 != create_command_pool(init, data)) return -1;
+    if (0 != create_command_buffers(init, data)) return -1;
+    return 0;
+}
+
+int draw_frame(Init& init, RenderData& data) {
+    init.disp.waitForFences(1, &data.in_flight_fences[data.current_frame], VK_TRUE, UINT64_MAX);
+
+    uint32_t image_index = 0;
+    VkResult result = init.disp.acquireNextImageKHR(
+        init.swapchain, UINT64_MAX, data.available_semaphores[data.current_frame], VK_NULL_HANDLE, &image_index);
+
+    if (result == VK_ERROR_OUT_OF_DATE_KHR) {
+        return recreate_swapchain(init, data);
+    } else if (result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR) {
+        std::cout << "failed to acquire swapchain image. 
Error " << result << "\n"; + return -1; + } + + if (data.image_in_flight[image_index] != VK_NULL_HANDLE) { + init.disp.waitForFences(1, &data.image_in_flight[image_index], VK_TRUE, UINT64_MAX); + } + data.image_in_flight[image_index] = data.in_flight_fences[data.current_frame]; + + VkSubmitInfo submitInfo = {}; + submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; + + VkSemaphore wait_semaphores[] = { data.available_semaphores[data.current_frame] }; + VkPipelineStageFlags wait_stages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT }; + submitInfo.waitSemaphoreCount = 1; + submitInfo.pWaitSemaphores = wait_semaphores; + submitInfo.pWaitDstStageMask = wait_stages; + + submitInfo.commandBufferCount = 1; + submitInfo.pCommandBuffers = &data.command_buffers[image_index]; + + VkSemaphore signal_semaphores[] = { data.finished_semaphore[data.current_frame] }; + submitInfo.signalSemaphoreCount = 1; + submitInfo.pSignalSemaphores = signal_semaphores; + + init.disp.resetFences(1, &data.in_flight_fences[data.current_frame]); + + if (init.disp.queueSubmit(data.graphics_queue, 1, &submitInfo, data.in_flight_fences[data.current_frame]) != VK_SUCCESS) { + std::cout << "failed to submit draw command buffer\n"; + return -1; //"failed to submit draw command buffer + } + + VkPresentInfoKHR present_info = {}; + present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; + + present_info.waitSemaphoreCount = 1; + present_info.pWaitSemaphores = signal_semaphores; + + VkSwapchainKHR swapChains[] = { init.swapchain }; + present_info.swapchainCount = 1; + present_info.pSwapchains = swapChains; + + present_info.pImageIndices = &image_index; + + result = init.disp.queuePresentKHR(data.present_queue, &present_info); + if (result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR) { + return recreate_swapchain(init, data); + } else if (result != VK_SUCCESS) { + std::cout << "failed to present swapchain image\n"; + return -1; + } + + data.current_frame = (data.current_frame + 1) % MAX_FRAMES_IN_FLIGHT; + return 0; +} + +void cleanup(Init& init, RenderData& data) { + for (size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { + init.disp.destroySemaphore(data.finished_semaphore[i], nullptr); + init.disp.destroySemaphore(data.available_semaphores[i], nullptr); + init.disp.destroyFence(data.in_flight_fences[i], nullptr); + } + + init.disp.destroyCommandPool(data.command_pool, nullptr); + + for (auto framebuffer : data.framebuffers) { + init.disp.destroyFramebuffer(framebuffer, nullptr); + } + + init.disp.destroyPipeline(data.graphics_pipeline, nullptr); + init.disp.destroyPipelineLayout(data.pipeline_layout, nullptr); + init.disp.destroyRenderPass(data.render_pass, nullptr); + + init.swapchain.destroy_image_views(data.swapchain_image_views); + + gfxrecon::test::destroy_swapchain(init.swapchain); + gfxrecon::test::destroy_device(init.device); + gfxrecon::test::destroy_surface(init.instance, init.surface); + gfxrecon::test::destroy_instance(init.instance); + destroy_window_sdl(init.window); +} + +GFXRECON_END_NAMESPACE(triangle) + +GFXRECON_END_NAMESPACE(test_app) + +GFXRECON_END_NAMESPACE(gfxrecon) + +int main(int argc, char *argv[]) { + using namespace gfxrecon::test_app::triangle; + + Init init; + RenderData render_data; + + if (0 != device_initialization(init)) return -1; + if (0 != create_swapchain(init)) return -1; + if (0 != get_queues(init, render_data)) return -1; + if (0 != create_render_pass(init, render_data)) return -1; + if (0 != create_graphics_pipeline(init, render_data)) return -1; + if (0 != 
create_framebuffers(init, render_data)) return -1; + if (0 != create_command_pool(init, render_data)) return -1; + if (0 != create_command_buffers(init, render_data)) return -1; + if (0 != create_sync_objects(init, render_data)) return -1; + + bool running = true; + while (running) { + SDL_Event windowEvent; + while (SDL_PollEvent(&windowEvent)) { + if (windowEvent.type == SDL_EVENT_QUIT) { + running = false; + break; + } + } + + int res = draw_frame(init, render_data); + if (res != 0) { + std::cout << "failed to draw frame \n"; + return -1; + } + } + + init.disp.deviceWaitIdle(); + + cleanup(init, render_data); + return 0; +} From e6c9ffcd7f00b27a91f89efd6279af3e25013249 Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Sat, 19 Oct 2024 16:52:01 -0400 Subject: [PATCH 02/70] Common test app cmake target --- test/test_apps/CMakeLists.txt | 2 ++ test/test_apps/triangle/CMakeLists.txt | 2 ++ 2 files changed, 4 insertions(+) diff --git a/test/test_apps/CMakeLists.txt b/test/test_apps/CMakeLists.txt index e99f0396ad..6425db96a0 100644 --- a/test/test_apps/CMakeLists.txt +++ b/test/test_apps/CMakeLists.txt @@ -26,4 +26,6 @@ # Description: CMake script for test apps ############################################################################### +add_custom_target(gfxrecon-testapps) + add_subdirectory(triangle) diff --git a/test/test_apps/triangle/CMakeLists.txt b/test/test_apps/triangle/CMakeLists.txt index 69ac79b2b1..f3104887bb 100644 --- a/test/test_apps/triangle/CMakeLists.txt +++ b/test/test_apps/triangle/CMakeLists.txt @@ -68,3 +68,5 @@ add_custom_command( DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_BUILD_TYPE}) install(TARGETS gfxrecon-testapp-triangle RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}) + +add_dependencies(gfxrecon-testapps gfxrecon-testapp-triangle) \ No newline at end of file From 8e7b58cd49f01d47289b804a10fbf93b424b4c97 Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Sat, 19 Oct 2024 17:45:20 -0400 Subject: [PATCH 03/70] Move window functions to test app base --- test/test_apps/common/test_app_base.cpp | 39 +++++++++++++ test/test_apps/common/test_app_base.h | 17 ++++++ test/test_apps/triangle/triangle.cpp | 76 +++++++------------------ 3 files changed, 75 insertions(+), 57 deletions(-) diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index 1eec42daf5..041c3733ad 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -406,6 +406,12 @@ struct PhysicalDeviceErrorCategory : std::error_category { }; const PhysicalDeviceErrorCategory physical_device_error_category; +struct SDLErrorCategory : std::error_category { + const char* name() const noexcept override { return "gfxrecon_test_sdl"; } + std::string message(int err) const override { return to_string(static_cast(err)); } +}; +const SDLErrorCategory sdl_error_category; + struct QueueErrorCategory : std::error_category { const char* name() const noexcept override { return "gfxrecon_test_queue"; } std::string message(int err) const override { return to_string(static_cast(err)); } @@ -432,6 +438,9 @@ std::error_code make_error_code(InstanceError instance_error) { std::error_code make_error_code(PhysicalDeviceError physical_device_error) { return { static_cast(physical_device_error), detail::physical_device_error_category }; } +std::error_code make_error_code(SDLError sdl_error) { + return { static_cast(sdl_error), detail::sdl_error_category }; +} std::error_code make_error_code(QueueError queue_error) { return { static_cast(queue_error), 
detail::queue_error_category };
 }
@@ -470,6 +479,9 @@ const char* to_string(PhysicalDeviceError err) {
             return "";
     }
 }
+const char* to_string(SDLError err) {
+    return SDL_GetError();
+}
 const char* to_string(QueueError err) {
     switch (err) {
         CASE_TO_STRING(QueueError, present_unavailable)
@@ -2205,6 +2217,33 @@ void SwapchainBuilder::add_desired_present_modes(std::vector<VkPresentModeKHR>&
     modes.push_back(VK_PRESENT_MODE_FIFO_KHR);
 }
 
+Result<SDL_Window*> create_window_sdl(const char* window_name, bool resizable, int width, int height) {
+    if (!SDL_Init(SDL_INIT_VIDEO)) return Result<SDL_Window*>{SDLError::general};
+
+    SDL_WindowFlags flags = 0;
+    flags |= SDL_WINDOW_VULKAN;
+    if (resizable) flags |= SDL_WINDOW_RESIZABLE;
+
+    auto window = SDL_CreateWindow(window_name, width, height, flags);
+    if (window == nullptr) return Result<SDL_Window*>{SDLError::general};
+
+    return window;
+}
+
+void destroy_window_sdl(SDL_Window* window) {
+    SDL_DestroyWindow(window);
+    SDL_Quit();
+}
+
+Result<VkSurfaceKHR> create_surface_sdl(VkInstance instance, SDL_Window * window, VkAllocationCallbacks* allocator) {
+    VkSurfaceKHR surface = VK_NULL_HANDLE;
+    if (!SDL_Vulkan_CreateSurface(window, instance, allocator, &surface)) {
+        surface = VK_NULL_HANDLE;
+        return Result<VkSurfaceKHR>{SDLError::general};
+    }
+    return surface;
+}
+
 GFXRECON_END_NAMESPACE(test)
 
 GFXRECON_END_NAMESPACE(gfxrecon)
diff --git a/test/test_apps/common/test_app_base.h b/test/test_apps/common/test_app_base.h
index 802a74a7de..5e2c425cca 100644
--- a/test/test_apps/common/test_app_base.h
+++ b/test/test_apps/common/test_app_base.h
@@ -37,6 +37,9 @@
 #include "test_app_dispatch.h"
 #include "util/defines.h"
 
+#include <SDL3/SDL.h>
+#include <SDL3/SDL_vulkan.h>
+
 #ifdef VK_MAKE_API_VERSION
 #define VKB_MAKE_VK_VERSION(variant, major, minor, patch) VK_MAKE_API_VERSION(variant, major, minor, patch)
 #elif defined(VK_MAKE_VERSION)
@@ -226,6 +229,9 @@ enum class InstanceError {
     requested_extensions_not_present,
     windowing_extensions_not_present,
 };
+enum class SDLError {
+    general,
+};
 enum class PhysicalDeviceError {
     no_surface_provided,
     failed_enumerate_physical_devices,
@@ -256,6 +262,7 @@ enum class SwapchainError {
 
 std::error_code make_error_code(InstanceError instance_error);
 std::error_code make_error_code(PhysicalDeviceError physical_device_error);
+std::error_code make_error_code(SDLError sdl_error);
 std::error_code make_error_code(QueueError queue_error);
 std::error_code make_error_code(DeviceError device_error);
 std::error_code make_error_code(SwapchainError swapchain_error);
@@ -265,6 +272,7 @@ const char* to_string_message_type(VkDebugUtilsMessageTypeFlagsEXT s);
 
 const char* to_string(InstanceError err);
 const char* to_string(PhysicalDeviceError err);
+const char* to_string(SDLError err);
 const char* to_string(QueueError err);
 const char* to_string(DeviceError err);
 const char* to_string(SwapchainError err);
@@ -1009,6 +1017,14 @@ class SwapchainBuilder {
     } info;
 };
+
+struct Void {};
+typedef Result<Void> VoidResult;
+const Void TEST_SUCCESS = Void{};
+
+Result<SDL_Window*> create_window_sdl(const char* window_name, bool resizable, int width, int height);
+void destroy_window_sdl(SDL_Window * window);
+Result<VkSurfaceKHR> create_surface_sdl(VkInstance instance, SDL_Window * window, VkAllocationCallbacks* allocator = nullptr);
 
 GFXRECON_END_NAMESPACE(test)
 
 GFXRECON_END_NAMESPACE(gfxrecon)
@@ -1017,6 +1033,7 @@
 namespace std {
 template <> struct is_error_code_enum<gfxrecon::test::InstanceError> : true_type {};
 template <> struct is_error_code_enum<gfxrecon::test::PhysicalDeviceError> : true_type {};
+template <> struct is_error_code_enum<gfxrecon::test::SDLError> : true_type {};
 template <> struct is_error_code_enum<gfxrecon::test::QueueError> : true_type {};
 template <> struct is_error_code_enum<gfxrecon::test::DeviceError> : 
true_type {}; template <> struct is_error_code_enum : true_type {}; diff --git a/test/test_apps/triangle/triangle.cpp b/test/test_apps/triangle/triangle.cpp index c011928849..d0958cbe7f 100644 --- a/test/test_apps/triangle/triangle.cpp +++ b/test/test_apps/triangle/triangle.cpp @@ -30,12 +30,10 @@ #include -#include -#include -#include - #include +#include + GFXRECON_BEGIN_NAMESPACE(gfxrecon) GFXRECON_BEGIN_NAMESPACE(test_app) @@ -76,75 +74,35 @@ struct RenderData { size_t current_frame = 0; }; -SDL_Window* create_window_sdl(const char* window_name = "", bool resize = true) { - if (!SDL_Init(SDL_INIT_VIDEO)) { - std::cout << SDL_GetError() << std::endl; - return nullptr; - } - - SDL_WindowFlags flags = 0; - flags |= SDL_WINDOW_VULKAN; - if (resize) flags |= SDL_WINDOW_RESIZABLE; - - auto window = SDL_CreateWindow(window_name, 1024, 1024, flags); - if (window == nullptr) { - std::cout << SDL_GetError() << std::endl; - return nullptr; - } - return window; -} - -void destroy_window_sdl(SDL_Window * window) { - SDL_DestroyWindow(window); - SDL_Quit(); -} - -VkSurfaceKHR create_surface_sdl(VkInstance instance, SDL_Window * window, VkAllocationCallbacks* allocator = nullptr) { - VkSurfaceKHR surface = VK_NULL_HANDLE; - if (!SDL_Vulkan_CreateSurface(window, instance, allocator, &surface)) { - auto error = SDL_GetError(); - std::cout << error << std::endl; - surface = VK_NULL_HANDLE; - } - return surface; -} - -int device_initialization(Init& init) { - init.window = create_window_sdl("Vulkan Triangle", true); - if (init.window == nullptr) return -1; +gfxrecon::test::VoidResult device_initialization(Init& init) { + auto window_ret = gfxrecon::test::create_window_sdl("Vulkan Triangle", true, 1024, 1024); + if (!window_ret) return window_ret.error(); + init.window = window_ret.value(); gfxrecon::test::InstanceBuilder instance_builder; auto instance_ret = instance_builder.use_default_debug_messenger().request_validation_layers().build(); - if (!instance_ret) { - std::cout << instance_ret.error().message() << "\n"; - return -1; - } + if (!instance_ret) return instance_ret.error(); init.instance = instance_ret.value(); init.inst_disp = init.instance.make_table(); - init.surface = create_surface_sdl(init.instance, init.window); - if (init.surface == nullptr) return -1; + auto surface_ret = gfxrecon::test::create_surface_sdl(init.instance, init.window); + if (!surface_ret) return surface_ret.error(); + init.surface = surface_ret.value(); gfxrecon::test::PhysicalDeviceSelector phys_device_selector(init.instance); auto phys_device_ret = phys_device_selector.set_surface(init.surface).select(); - if (!phys_device_ret) { - std::cout << phys_device_ret.error().message() << "\n"; - return -1; - } + if (!phys_device_ret) return phys_device_ret.error(); gfxrecon::test::PhysicalDevice physical_device = phys_device_ret.value(); gfxrecon::test::DeviceBuilder device_builder{ physical_device }; auto device_ret = device_builder.build(); - if (!device_ret) { - std::cout << device_ret.error().message() << "\n"; - return -1; - } + if (!device_ret) return device_ret.error(); init.device = device_ret.value(); init.disp = init.device.make_table(); - return 0; + return gfxrecon::test::TEST_SUCCESS; } int create_swapchain(Init& init) { @@ -611,7 +569,7 @@ void cleanup(Init& init, RenderData& data) { gfxrecon::test::destroy_device(init.device); gfxrecon::test::destroy_surface(init.instance, init.surface); gfxrecon::test::destroy_instance(init.instance); - destroy_window_sdl(init.window); + 
gfxrecon::test::destroy_window_sdl(init.window); } GFXRECON_END_NAMESPACE(triangle) @@ -626,7 +584,11 @@ int main(int argc, char *argv[]) { Init init; RenderData render_data; - if (0 != device_initialization(init)) return -1; + if (auto init_ret = device_initialization((init)); !init_ret) + { + std::cout << init_ret.error().message() << "\n"; + return -1; + } if (0 != create_swapchain(init)) return -1; if (0 != get_queues(init, render_data)) return -1; if (0 != create_render_pass(init, render_data)) return -1; From 5fff727c5bd3315c467876a170c98b456a830c2e Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Sat, 19 Oct 2024 21:06:52 -0400 Subject: [PATCH 04/70] Move swapchain creating to test app base --- test/test_apps/common/test_app_base.cpp | 11 +++++++ test/test_apps/common/test_app_base.h | 2 ++ test/test_apps/triangle/triangle.cpp | 39 +++++++++++++------------ 3 files changed, 33 insertions(+), 19 deletions(-) diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index 041c3733ad..fde329529b 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -2244,6 +2244,17 @@ Result create_surface_sdl(VkInstance instance, SDL_Window * window return surface; } +VoidResult create_swapchain(Device const& device, Swapchain& swapchain) { + SwapchainBuilder swapchain_builder{ device }; + auto swap_ret = swapchain_builder.set_old_swapchain(swapchain).build(); + if (!swap_ret) return swap_ret.error(); + + destroy_swapchain(swapchain); + swapchain = swap_ret.value(); + + return TEST_SUCCESS; +} + GFXRECON_END_NAMESPACE(test) GFXRECON_END_NAMESPACE(gfxrecon) diff --git a/test/test_apps/common/test_app_base.h b/test/test_apps/common/test_app_base.h index 5e2c425cca..b789e8ae38 100644 --- a/test/test_apps/common/test_app_base.h +++ b/test/test_apps/common/test_app_base.h @@ -1025,6 +1025,8 @@ const Void TEST_SUCCESS = Void{}; Result create_window_sdl(const char* window_name, bool resizable, int width, int height); void destroy_window_sdl(SDL_Window * window); Result create_surface_sdl(VkInstance instance, SDL_Window * window, VkAllocationCallbacks* allocator = nullptr); +VoidResult create_swapchain(Device const&, Swapchain& swapchain); + GFXRECON_END_NAMESPACE(test) GFXRECON_END_NAMESPACE(gfxrecon) diff --git a/test/test_apps/triangle/triangle.cpp b/test/test_apps/triangle/triangle.cpp index d0958cbe7f..bf930f4bcf 100644 --- a/test/test_apps/triangle/triangle.cpp +++ b/test/test_apps/triangle/triangle.cpp @@ -105,19 +105,6 @@ gfxrecon::test::VoidResult device_initialization(Init& init) { return gfxrecon::test::TEST_SUCCESS; } -int create_swapchain(Init& init) { - - gfxrecon::test::SwapchainBuilder swapchain_builder{ init.device }; - auto swap_ret = swapchain_builder.set_old_swapchain(init.swapchain).build(); - if (!swap_ret) { - std::cout << swap_ret.error().message() << " " << swap_ret.vk_result() << "\n"; - return -1; - } - gfxrecon::test::destroy_swapchain(init.swapchain); - init.swapchain = swap_ret.value(); - return 0; -} - int get_queues(Init& init, RenderData& data) { auto gq = init.device.get_queue(gfxrecon::test::QueueType::graphics); if (!gq.has_value()) { @@ -473,7 +460,13 @@ int recreate_swapchain(Init& init, RenderData& data) { init.swapchain.destroy_image_views(data.swapchain_image_views); - if (0 != create_swapchain(init)) return -1; + auto swapchain_ret = gfxrecon::test::create_swapchain(init.device, init.swapchain); + if (!swapchain_ret) + { + std::cout << swapchain_ret.error().message() << "\n"; + return 
-1; + } + if (0 != create_framebuffers(init, data)) return -1; if (0 != create_command_pool(init, data)) return -1; if (0 != create_command_buffers(init, data)) return -1; @@ -578,18 +571,28 @@ GFXRECON_END_NAMESPACE(test_app) GFXRECON_END_NAMESPACE(gfxrecon) +const int NUM_FRAMES = 10; + int main(int argc, char *argv[]) { using namespace gfxrecon::test_app::triangle; Init init; RenderData render_data; - if (auto init_ret = device_initialization((init)); !init_ret) + auto init_ret = device_initialization((init)); + if (!init_ret) { std::cout << init_ret.error().message() << "\n"; return -1; } - if (0 != create_swapchain(init)) return -1; + + auto swapchain_ret = gfxrecon::test::create_swapchain(init.device, init.swapchain); + if (!swapchain_ret) + { + std::cout << swapchain_ret.error().message() << "\n"; + return -1; + } + if (0 != get_queues(init, render_data)) return -1; if (0 != create_render_pass(init, render_data)) return -1; if (0 != create_graphics_pipeline(init, render_data)) return -1; @@ -598,12 +601,10 @@ int main(int argc, char *argv[]) { if (0 != create_command_buffers(init, render_data)) return -1; if (0 != create_sync_objects(init, render_data)) return -1; - bool running = true; - while (running) { + for (int frame = 0; frame < NUM_FRAMES; frame++) { SDL_Event windowEvent; while (SDL_PollEvent(&windowEvent)) { if (windowEvent.type == SDL_EVENT_QUIT) { - running = false; break; } } From ad9469a91642ce762a5ea2e1bea7ec286ebe580c Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Sun, 20 Oct 2024 19:38:27 -0400 Subject: [PATCH 05/70] Refactor framebuffer creation into test app base --- test/test_apps/common/test_app_base.cpp | 31 +++++++----- test/test_apps/common/test_app_base.h | 13 ++--- test/test_apps/triangle/triangle.cpp | 67 ++++++++++++++++++------- 3 files changed, 76 insertions(+), 35 deletions(-) diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index fde329529b..715967672f 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -406,11 +406,11 @@ struct PhysicalDeviceErrorCategory : std::error_category { }; const PhysicalDeviceErrorCategory physical_device_error_category; -struct SDLErrorCategory : std::error_category { - const char* name() const noexcept override { return "gfxrecon_test_sdl"; } - std::string message(int err) const override { return to_string(static_cast(err)); } +struct GeneralErrorCategory : std::error_category { + const char* name() const noexcept override { return "gfxrecon_test_general"; } + std::string message(int err) const override { return to_string(static_cast(err)); } }; -const SDLErrorCategory sdl_error_category; +const GeneralErrorCategory general_error_category; struct QueueErrorCategory : std::error_category { const char* name() const noexcept override { return "gfxrecon_test_queue"; } @@ -438,8 +438,8 @@ std::error_code make_error_code(InstanceError instance_error) { std::error_code make_error_code(PhysicalDeviceError physical_device_error) { return { static_cast(physical_device_error), detail::physical_device_error_category }; } -std::error_code make_error_code(SDLError sdl_error) { - return { static_cast(sdl_error), detail::sdl_error_category }; +std::error_code make_error_code(GeneralError general_error) { + return { static_cast(general_error), detail::general_error_category }; } std::error_code make_error_code(QueueError queue_error) { return { static_cast(queue_error), detail::queue_error_category }; @@ -479,8 +479,15 @@ const char* 
to_string(PhysicalDeviceError err) {
             return "";
     }
 }
-const char* to_string(SDLError err) {
-    return SDL_GetError();
+const char* to_string(GeneralError err) {
+    switch (err) {
+        case GeneralError::sdl:
+            return SDL_GetError();
+        case GeneralError::unexpected:
+            return "unexpected error";
+        default:
+            return "";
+    }
 }
 const char* to_string(QueueError err) {
     switch (err) {
         CASE_TO_STRING(QueueError, present_unavailable)
@@ -2218,14 +2225,14 @@ void SwapchainBuilder::add_desired_present_modes(std::vector<VkPresentModeKHR>&
 }
 
 Result<SDL_Window*> create_window_sdl(const char* window_name, bool resizable, int width, int height) {
-    if (!SDL_Init(SDL_INIT_VIDEO)) return Result<SDL_Window*>{SDLError::general};
+    if (!SDL_Init(SDL_INIT_VIDEO)) return Result<SDL_Window*>{GeneralError::sdl};
 
     SDL_WindowFlags flags = 0;
     flags |= SDL_WINDOW_VULKAN;
     if (resizable) flags |= SDL_WINDOW_RESIZABLE;
 
     auto window = SDL_CreateWindow(window_name, width, height, flags);
-    if (window == nullptr) return Result<SDL_Window*>{SDLError::general};
+    if (window == nullptr) return Result<SDL_Window*>{GeneralError::sdl};
 
     return window;
 }
@@ -2239,7 +2246,7 @@ Result<VkSurfaceKHR> create_surface_sdl(VkInstance instance, SDL_Window * window
     VkSurfaceKHR surface = VK_NULL_HANDLE;
     if (!SDL_Vulkan_CreateSurface(window, instance, allocator, &surface)) {
         surface = VK_NULL_HANDLE;
-        return Result<VkSurfaceKHR>{SDLError::general};
+        return Result<VkSurfaceKHR>{GeneralError::sdl};
     }
     return surface;
 }
@@ -2252,7 +2259,7 @@ VoidResult create_swapchain(Device const& device, Swapchain& swapchain) {
     destroy_swapchain(swapchain);
     swapchain = swap_ret.value();
 
-    return TEST_SUCCESS;
+    return SUCCESS;
 }
 
 GFXRECON_END_NAMESPACE(test)
diff --git a/test/test_apps/common/test_app_base.h b/test/test_apps/common/test_app_base.h
index b789e8ae38..6c4c4907d1 100644
--- a/test/test_apps/common/test_app_base.h
+++ b/test/test_apps/common/test_app_base.h
@@ -229,8 +229,9 @@ enum class InstanceError {
     requested_extensions_not_present,
     windowing_extensions_not_present,
 };
-enum class SDLError {
-    general,
+enum class GeneralError {
+    sdl,
+    unexpected,
 };
 enum class PhysicalDeviceError {
     no_surface_provided,
@@ -262,7 +263,7 @@ enum class SwapchainError {
 
 std::error_code make_error_code(InstanceError instance_error);
 std::error_code make_error_code(PhysicalDeviceError physical_device_error);
-std::error_code make_error_code(SDLError sdl_error);
+std::error_code make_error_code(GeneralError general_error);
 std::error_code make_error_code(QueueError queue_error);
 std::error_code make_error_code(DeviceError device_error);
 std::error_code make_error_code(SwapchainError swapchain_error);
@@ -272,7 +273,7 @@ const char* to_string_message_type(VkDebugUtilsMessageTypeFlagsEXT s);
 
 const char* to_string(InstanceError err);
 const char* to_string(PhysicalDeviceError err);
-const char* to_string(SDLError err);
+const char* to_string(GeneralError err);
 const char* to_string(QueueError err);
 const char* to_string(DeviceError err);
 const char* to_string(SwapchainError err);
@@ -1020,7 +1021,7 @@ class SwapchainBuilder {
 
 struct Void {};
 typedef Result<Void> VoidResult;
-const Void TEST_SUCCESS = Void{};
+const Void SUCCESS = Void{};
 
 Result<SDL_Window*> create_window_sdl(const char* window_name, bool resizable, int width, int height);
 void destroy_window_sdl(SDL_Window * window);
@@ -1035,7 +1036,7 @@
 namespace std {
 template <> struct is_error_code_enum<gfxrecon::test::InstanceError> : true_type {};
 template <> struct is_error_code_enum<gfxrecon::test::PhysicalDeviceError> : true_type {};
-template <> struct is_error_code_enum<gfxrecon::test::SDLError> : true_type {};
+template <> struct is_error_code_enum<gfxrecon::test::GeneralError> : true_type {};
 template <> struct is_error_code_enum<gfxrecon::test::QueueError> : true_type {};
 template <> struct is_error_code_enum<gfxrecon::test::DeviceError> : true_type {};
 template <> struct 
is_error_code_enum : true_type {}; diff --git a/test/test_apps/triangle/triangle.cpp b/test/test_apps/triangle/triangle.cpp index bf930f4bcf..a58dd892fa 100644 --- a/test/test_apps/triangle/triangle.cpp +++ b/test/test_apps/triangle/triangle.cpp @@ -102,7 +102,7 @@ gfxrecon::test::VoidResult device_initialization(Init& init) { init.disp = init.device.make_table(); - return gfxrecon::test::TEST_SUCCESS; + return gfxrecon::test::SUCCESS; } int get_queues(Init& init, RenderData& data) { @@ -326,29 +326,32 @@ int create_graphics_pipeline(Init& init, RenderData& data) { return 0; } -int create_framebuffers(Init& init, RenderData& data) { - data.swapchain_images = init.swapchain.get_images().value(); - data.swapchain_image_views = init.swapchain.get_image_views().value(); - - data.framebuffers.resize(data.swapchain_image_views.size()); +gfxrecon::test::VoidResult create_framebuffers( + gfxrecon::test::Swapchain const& swapchain, + gfxrecon::test::DispatchTable const& disp, + std::vector& framebuffers, + std::vector& swapchain_image_views, + VkRenderPass render_pass +) { + framebuffers.resize(swapchain_image_views.size()); - for (size_t i = 0; i < data.swapchain_image_views.size(); i++) { - VkImageView attachments[] = { data.swapchain_image_views[i] }; + for (size_t i = 0; i < swapchain_image_views.size(); i++) { + VkImageView attachments[] = { swapchain_image_views[i] }; VkFramebufferCreateInfo framebuffer_info = {}; framebuffer_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO; - framebuffer_info.renderPass = data.render_pass; + framebuffer_info.renderPass = render_pass; framebuffer_info.attachmentCount = 1; framebuffer_info.pAttachments = attachments; - framebuffer_info.width = init.swapchain.extent.width; - framebuffer_info.height = init.swapchain.extent.height; + framebuffer_info.width = swapchain.extent.width; + framebuffer_info.height = swapchain.extent.height; framebuffer_info.layers = 1; - if (init.disp.createFramebuffer(&framebuffer_info, nullptr, &data.framebuffers[i]) != VK_SUCCESS) { - return -1; // failed to create framebuffer - } + auto result = disp.createFramebuffer(&framebuffer_info, nullptr, &framebuffers[i]); + if (result != VK_SUCCESS) return gfxrecon::test::VoidResult{gfxrecon::test::GeneralError::unexpected, result}; } - return 0; + + return gfxrecon::test::SUCCESS; } int create_command_pool(Init& init, RenderData& data) { @@ -467,7 +470,21 @@ int recreate_swapchain(Init& init, RenderData& data) { return -1; } - if (0 != create_framebuffers(init, data)) return -1; + data.swapchain_images = init.swapchain.get_images().value(); + data.swapchain_image_views = init.swapchain.get_image_views().value(); + + auto framebuffer_ret = create_framebuffers( + init.swapchain, + init.disp, + data.framebuffers, + data.swapchain_image_views, + data.render_pass + ); + if (!framebuffer_ret) { + std::cout << framebuffer_ret.error().message() << "\n"; + return -1; + } + if (0 != create_command_pool(init, data)) return -1; if (0 != create_command_buffers(init, data)) return -1; return 0; @@ -596,7 +613,23 @@ int main(int argc, char *argv[]) { if (0 != get_queues(init, render_data)) return -1; if (0 != create_render_pass(init, render_data)) return -1; if (0 != create_graphics_pipeline(init, render_data)) return -1; - if (0 != create_framebuffers(init, render_data)) return -1; + + render_data.swapchain_images = init.swapchain.get_images().value(); + render_data.swapchain_image_views = init.swapchain.get_image_views().value(); + + auto framebuffer_ret = create_framebuffers( + init.swapchain, + 
init.disp, + render_data.framebuffers, + render_data.swapchain_image_views, + render_data.render_pass + ); + if (!framebuffer_ret) + { + std::cout << framebuffer_ret.error().message() << "\n"; + return -1; + } + if (0 != create_command_pool(init, render_data)) return -1; if (0 != create_command_buffers(init, render_data)) return -1; if (0 != create_sync_objects(init, render_data)) return -1; From a478c15af9e0a9ef0efafa33cc1d5aef58816002 Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Sun, 20 Oct 2024 19:51:36 -0400 Subject: [PATCH 06/70] Refactor command pool creation to test_app_base --- test/test_apps/common/test_app_base.cpp | 16 +++++++++++++ test/test_apps/common/test_app_base.h | 2 ++ test/test_apps/triangle/triangle.cpp | 32 ++++++++++++++----------- 3 files changed, 36 insertions(+), 14 deletions(-) diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index 715967672f..38e8e54954 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -2262,6 +2262,22 @@ VoidResult create_swapchain(Device const& device, Swapchain& swapchain) { return SUCCESS; } +Result create_command_pool( + DispatchTable const& disp, + uint32_t queue_family_index +) { + VkCommandPoolCreateInfo pool_info = {}; + pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO; + pool_info.queueFamilyIndex = queue_family_index; + + VkCommandPool command_pool; + auto result = disp.createCommandPool(&pool_info, nullptr, &command_pool); + if (result != VK_SUCCESS) { + return Result{GeneralError::unexpected, result}; + } + return command_pool; +} + GFXRECON_END_NAMESPACE(test) GFXRECON_END_NAMESPACE(gfxrecon) diff --git a/test/test_apps/common/test_app_base.h b/test/test_apps/common/test_app_base.h index 6c4c4907d1..21cbc05caf 100644 --- a/test/test_apps/common/test_app_base.h +++ b/test/test_apps/common/test_app_base.h @@ -1028,6 +1028,8 @@ void destroy_window_sdl(SDL_Window * window); Result create_surface_sdl(VkInstance instance, SDL_Window * window, VkAllocationCallbacks* allocator = nullptr); VoidResult create_swapchain(Device const&, Swapchain& swapchain); +Result create_command_pool(DispatchTable const& disp, uint32_t queue_family_index); + GFXRECON_END_NAMESPACE(test) GFXRECON_END_NAMESPACE(gfxrecon) diff --git a/test/test_apps/triangle/triangle.cpp b/test/test_apps/triangle/triangle.cpp index a58dd892fa..810289cad1 100644 --- a/test/test_apps/triangle/triangle.cpp +++ b/test/test_apps/triangle/triangle.cpp @@ -354,18 +354,6 @@ gfxrecon::test::VoidResult create_framebuffers( return gfxrecon::test::SUCCESS; } -int create_command_pool(Init& init, RenderData& data) { - VkCommandPoolCreateInfo pool_info = {}; - pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO; - pool_info.queueFamilyIndex = init.device.get_queue_index(gfxrecon::test::QueueType::graphics).value(); - - if (init.disp.createCommandPool(&pool_info, nullptr, &data.command_pool) != VK_SUCCESS) { - std::cout << "failed to create command pool\n"; - return -1; // failed to create command pool - } - return 0; -} - int create_command_buffers(Init& init, RenderData& data) { data.command_buffers.resize(data.framebuffers.size()); @@ -485,7 +473,15 @@ int recreate_swapchain(Init& init, RenderData& data) { return -1; } - if (0 != create_command_pool(init, data)) return -1; + auto queue_family_index = init.device.get_queue_index(gfxrecon::test::QueueType::graphics).value(); + auto command_pool_ret = create_command_pool(init.disp, queue_family_index); + if 
(!command_pool_ret) + { + std::cout << command_pool_ret.error().message() << "\n"; + return -1; + } + data.command_pool = command_pool_ret.value(); + if (0 != create_command_buffers(init, data)) return -1; return 0; } @@ -630,7 +626,15 @@ int main(int argc, char *argv[]) { return -1; } - if (0 != create_command_pool(init, render_data)) return -1; + auto queue_family_index = init.device.get_queue_index(gfxrecon::test::QueueType::graphics).value(); + auto command_pool_ret = create_command_pool(init.disp, queue_family_index); + if (!command_pool_ret) + { + std::cout << command_pool_ret.error().message() << "\n"; + return -1; + } + render_data.command_pool = command_pool_ret.value(); + if (0 != create_command_buffers(init, render_data)) return -1; if (0 != create_sync_objects(init, render_data)) return -1; From 81c25c87ba180a8ab811af76f8a5cece08bc5ee4 Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Mon, 21 Oct 2024 08:40:32 -0400 Subject: [PATCH 07/70] Separate sync object from render data --- test/test_apps/triangle/triangle.cpp | 55 +++++++++++++++------------- 1 file changed, 30 insertions(+), 25 deletions(-) diff --git a/test/test_apps/triangle/triangle.cpp b/test/test_apps/triangle/triangle.cpp index 810289cad1..47b9a61ebe 100644 --- a/test/test_apps/triangle/triangle.cpp +++ b/test/test_apps/triangle/triangle.cpp @@ -52,6 +52,13 @@ struct Init { gfxrecon::test::Swapchain swapchain; }; +struct Sync { + std::vector available_semaphores; + std::vector finished_semaphore; + std::vector in_flight_fences; + std::vector image_in_flight; +}; + struct RenderData { VkQueue graphics_queue; VkQueue present_queue; @@ -67,11 +74,9 @@ struct RenderData { VkCommandPool command_pool; std::vector command_buffers; - std::vector available_semaphores; - std::vector finished_semaphore; - std::vector in_flight_fences; - std::vector image_in_flight; size_t current_frame = 0; + + Sync sync; }; gfxrecon::test::VoidResult device_initialization(Init& init) { @@ -416,11 +421,11 @@ int create_command_buffers(Init& init, RenderData& data) { return 0; } -int create_sync_objects(Init& init, RenderData& data) { - data.available_semaphores.resize(MAX_FRAMES_IN_FLIGHT); - data.finished_semaphore.resize(MAX_FRAMES_IN_FLIGHT); - data.in_flight_fences.resize(MAX_FRAMES_IN_FLIGHT); - data.image_in_flight.resize(init.swapchain.image_count, VK_NULL_HANDLE); +int create_sync_objects(Init& init, Sync& sync) { + sync.available_semaphores.resize(MAX_FRAMES_IN_FLIGHT); + sync.finished_semaphore.resize(MAX_FRAMES_IN_FLIGHT); + sync.in_flight_fences.resize(MAX_FRAMES_IN_FLIGHT); + sync.image_in_flight.resize(init.swapchain.image_count, VK_NULL_HANDLE); VkSemaphoreCreateInfo semaphore_info = {}; semaphore_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO; @@ -430,9 +435,9 @@ int create_sync_objects(Init& init, RenderData& data) { fence_info.flags = VK_FENCE_CREATE_SIGNALED_BIT; for (size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { - if (init.disp.createSemaphore(&semaphore_info, nullptr, &data.available_semaphores[i]) != VK_SUCCESS || - init.disp.createSemaphore(&semaphore_info, nullptr, &data.finished_semaphore[i]) != VK_SUCCESS || - init.disp.createFence(&fence_info, nullptr, &data.in_flight_fences[i]) != VK_SUCCESS) { + if (init.disp.createSemaphore(&semaphore_info, nullptr, &sync.available_semaphores[i]) != VK_SUCCESS || + init.disp.createSemaphore(&semaphore_info, nullptr, &sync.finished_semaphore[i]) != VK_SUCCESS || + init.disp.createFence(&fence_info, nullptr, &sync.in_flight_fences[i]) != VK_SUCCESS) { std::cout << "failed to 
create sync objects\n"; return -1; // failed to create synchronization objects for a frame } @@ -487,11 +492,11 @@ int recreate_swapchain(Init& init, RenderData& data) { } int draw_frame(Init& init, RenderData& data) { - init.disp.waitForFences(1, &data.in_flight_fences[data.current_frame], VK_TRUE, UINT64_MAX); + init.disp.waitForFences(1, &data.sync.in_flight_fences[data.current_frame], VK_TRUE, UINT64_MAX); uint32_t image_index = 0; VkResult result = init.disp.acquireNextImageKHR( - init.swapchain, UINT64_MAX, data.available_semaphores[data.current_frame], VK_NULL_HANDLE, &image_index); + init.swapchain, UINT64_MAX, data.sync.available_semaphores[data.current_frame], VK_NULL_HANDLE, &image_index); if (result == VK_ERROR_OUT_OF_DATE_KHR) { return recreate_swapchain(init, data); @@ -500,15 +505,15 @@ int draw_frame(Init& init, RenderData& data) { return -1; } - if (data.image_in_flight[image_index] != VK_NULL_HANDLE) { - init.disp.waitForFences(1, &data.image_in_flight[image_index], VK_TRUE, UINT64_MAX); + if (data.sync.image_in_flight[image_index] != VK_NULL_HANDLE) { + init.disp.waitForFences(1, &data.sync.image_in_flight[image_index], VK_TRUE, UINT64_MAX); } - data.image_in_flight[image_index] = data.in_flight_fences[data.current_frame]; + data.sync.image_in_flight[image_index] = data.sync.in_flight_fences[data.current_frame]; VkSubmitInfo submitInfo = {}; submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; - VkSemaphore wait_semaphores[] = { data.available_semaphores[data.current_frame] }; + VkSemaphore wait_semaphores[] = { data.sync.available_semaphores[data.current_frame] }; VkPipelineStageFlags wait_stages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT }; submitInfo.waitSemaphoreCount = 1; submitInfo.pWaitSemaphores = wait_semaphores; @@ -517,13 +522,13 @@ int draw_frame(Init& init, RenderData& data) { submitInfo.commandBufferCount = 1; submitInfo.pCommandBuffers = &data.command_buffers[image_index]; - VkSemaphore signal_semaphores[] = { data.finished_semaphore[data.current_frame] }; + VkSemaphore signal_semaphores[] = { data.sync.finished_semaphore[data.current_frame] }; submitInfo.signalSemaphoreCount = 1; submitInfo.pSignalSemaphores = signal_semaphores; - init.disp.resetFences(1, &data.in_flight_fences[data.current_frame]); + init.disp.resetFences(1, &data.sync.in_flight_fences[data.current_frame]); - if (init.disp.queueSubmit(data.graphics_queue, 1, &submitInfo, data.in_flight_fences[data.current_frame]) != VK_SUCCESS) { + if (init.disp.queueSubmit(data.graphics_queue, 1, &submitInfo, data.sync.in_flight_fences[data.current_frame]) != VK_SUCCESS) { std::cout << "failed to submit draw command buffer\n"; return -1; //"failed to submit draw command buffer } @@ -554,9 +559,9 @@ int draw_frame(Init& init, RenderData& data) { void cleanup(Init& init, RenderData& data) { for (size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { - init.disp.destroySemaphore(data.finished_semaphore[i], nullptr); - init.disp.destroySemaphore(data.available_semaphores[i], nullptr); - init.disp.destroyFence(data.in_flight_fences[i], nullptr); + init.disp.destroySemaphore(data.sync.finished_semaphore[i], nullptr); + init.disp.destroySemaphore(data.sync.available_semaphores[i], nullptr); + init.disp.destroyFence(data.sync.in_flight_fences[i], nullptr); } init.disp.destroyCommandPool(data.command_pool, nullptr); @@ -636,7 +641,7 @@ int main(int argc, char *argv[]) { render_data.command_pool = command_pool_ret.value(); if (0 != create_command_buffers(init, render_data)) return -1; - if (0 != 
create_sync_objects(init, render_data)) return -1; + if (0 != create_sync_objects(init, render_data.sync)) return -1; for (int frame = 0; frame < NUM_FRAMES; frame++) { SDL_Event windowEvent; From b9dd99ed128087b939af091b2997474741cc17de Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Mon, 21 Oct 2024 10:13:26 -0400 Subject: [PATCH 08/70] Move sync object creation into test app base --- test/test_apps/common/test_app_base.cpp | 25 +++++++++++++++ test/test_apps/common/test_app_base.h | 19 ++++++++++- test/test_apps/triangle/triangle.cpp | 42 ++++++------------------- 3 files changed, 52 insertions(+), 34 deletions(-) diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index 38e8e54954..1245bed502 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -2278,6 +2278,31 @@ Result create_command_pool( return command_pool; } +Result create_sync_objects(Swapchain const& swapchain, DispatchTable const& disp, const int max_frames_in_flight) { + Sync sync; + + sync.available_semaphores.resize(max_frames_in_flight); + sync.finished_semaphore.resize(max_frames_in_flight); + sync.in_flight_fences.resize(max_frames_in_flight); + sync.image_in_flight.resize(swapchain.image_count, VK_NULL_HANDLE); + + VkSemaphoreCreateInfo semaphore_info = {}; + semaphore_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO; + + VkFenceCreateInfo fence_info = {}; + fence_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO; + fence_info.flags = VK_FENCE_CREATE_SIGNALED_BIT; + + for (size_t i = 0; i < max_frames_in_flight; i++) { + if (disp.createSemaphore(&semaphore_info, nullptr, &sync.available_semaphores[i]) != VK_SUCCESS || + disp.createSemaphore(&semaphore_info, nullptr, &sync.finished_semaphore[i]) != VK_SUCCESS || + disp.createFence(&fence_info, nullptr, &sync.in_flight_fences[i]) != VK_SUCCESS) { + return gfxrecon::test::Result{gfxrecon::test::GeneralError::unexpected}; + } + } + return sync; +} + GFXRECON_END_NAMESPACE(test) GFXRECON_END_NAMESPACE(gfxrecon) diff --git a/test/test_apps/common/test_app_base.h b/test/test_apps/common/test_app_base.h index 21cbc05caf..0411d2c911 100644 --- a/test/test_apps/common/test_app_base.h +++ b/test/test_apps/common/test_app_base.h @@ -1018,7 +1018,6 @@ class SwapchainBuilder { } info; }; - struct Void {}; typedef Result VoidResult; const Void SUCCESS = Void{}; @@ -1030,6 +1029,24 @@ VoidResult create_swapchain(Device const&, Swapchain& swapchain); Result create_command_pool(DispatchTable const& disp, uint32_t queue_family_index); +struct Sync { + std::vector available_semaphores; + std::vector finished_semaphore; + std::vector in_flight_fences; + std::vector image_in_flight; + + Sync() = default; + ~Sync() = default; + + Sync(const Sync&) = delete; + Sync& operator =(const Sync&) = delete; + + Sync(Sync&&) = default; + Sync& operator =(Sync&&) = default; +}; + +Result create_sync_objects(Swapchain const& swapchain, DispatchTable const& disp, const int max_frames_in_flight); + GFXRECON_END_NAMESPACE(test) GFXRECON_END_NAMESPACE(gfxrecon) diff --git a/test/test_apps/triangle/triangle.cpp b/test/test_apps/triangle/triangle.cpp index 47b9a61ebe..ad48ff3391 100644 --- a/test/test_apps/triangle/triangle.cpp +++ b/test/test_apps/triangle/triangle.cpp @@ -52,13 +52,6 @@ struct Init { gfxrecon::test::Swapchain swapchain; }; -struct Sync { - std::vector available_semaphores; - std::vector finished_semaphore; - std::vector in_flight_fences; - std::vector image_in_flight; -}; - struct RenderData { 
VkQueue graphics_queue; VkQueue present_queue; @@ -76,7 +69,7 @@ struct RenderData { size_t current_frame = 0; - Sync sync; + gfxrecon::test::Sync sync; }; gfxrecon::test::VoidResult device_initialization(Init& init) { @@ -421,30 +414,6 @@ int create_command_buffers(Init& init, RenderData& data) { return 0; } -int create_sync_objects(Init& init, Sync& sync) { - sync.available_semaphores.resize(MAX_FRAMES_IN_FLIGHT); - sync.finished_semaphore.resize(MAX_FRAMES_IN_FLIGHT); - sync.in_flight_fences.resize(MAX_FRAMES_IN_FLIGHT); - sync.image_in_flight.resize(init.swapchain.image_count, VK_NULL_HANDLE); - - VkSemaphoreCreateInfo semaphore_info = {}; - semaphore_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO; - - VkFenceCreateInfo fence_info = {}; - fence_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO; - fence_info.flags = VK_FENCE_CREATE_SIGNALED_BIT; - - for (size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { - if (init.disp.createSemaphore(&semaphore_info, nullptr, &sync.available_semaphores[i]) != VK_SUCCESS || - init.disp.createSemaphore(&semaphore_info, nullptr, &sync.finished_semaphore[i]) != VK_SUCCESS || - init.disp.createFence(&fence_info, nullptr, &sync.in_flight_fences[i]) != VK_SUCCESS) { - std::cout << "failed to create sync objects\n"; - return -1; // failed to create synchronization objects for a frame - } - } - return 0; -} - int recreate_swapchain(Init& init, RenderData& data) { init.disp.deviceWaitIdle(); @@ -641,7 +610,14 @@ int main(int argc, char *argv[]) { render_data.command_pool = command_pool_ret.value(); if (0 != create_command_buffers(init, render_data)) return -1; - if (0 != create_sync_objects(init, render_data.sync)) return -1; + + auto sync_ret = gfxrecon::test::create_sync_objects(init.swapchain, init.disp, MAX_FRAMES_IN_FLIGHT); + if (!sync_ret) + { + std::cout << command_pool_ret.error().message() << "\n"; + return -1; + } + render_data.sync = std::move(sync_ret.value()); for (int frame = 0; frame < NUM_FRAMES; frame++) { SDL_Event windowEvent; From 025cb13c430d0e420f5d123e43aba4e3568d9877 Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Mon, 21 Oct 2024 10:56:22 -0400 Subject: [PATCH 09/70] Move triangle runner into namespace --- test/test_apps/triangle/triangle.cpp | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/test/test_apps/triangle/triangle.cpp b/test/test_apps/triangle/triangle.cpp index ad48ff3391..a4c4b40cdb 100644 --- a/test/test_apps/triangle/triangle.cpp +++ b/test/test_apps/triangle/triangle.cpp @@ -448,7 +448,7 @@ int recreate_swapchain(Init& init, RenderData& data) { } auto queue_family_index = init.device.get_queue_index(gfxrecon::test::QueueType::graphics).value(); - auto command_pool_ret = create_command_pool(init.disp, queue_family_index); + auto command_pool_ret = gfxrecon::test::create_command_pool(init.disp, queue_family_index); if (!command_pool_ret) { std::cout << command_pool_ret.error().message() << "\n"; @@ -552,17 +552,9 @@ void cleanup(Init& init, RenderData& data) { gfxrecon::test::destroy_window_sdl(init.window); } -GFXRECON_END_NAMESPACE(triangle) - -GFXRECON_END_NAMESPACE(test_app) - -GFXRECON_END_NAMESPACE(gfxrecon) - const int NUM_FRAMES = 10; -int main(int argc, char *argv[]) { - using namespace gfxrecon::test_app::triangle; - +int run() { Init init; RenderData render_data; @@ -601,7 +593,7 @@ int main(int argc, char *argv[]) { } auto queue_family_index = init.device.get_queue_index(gfxrecon::test::QueueType::graphics).value(); - auto command_pool_ret = 
create_command_pool(init.disp, queue_family_index); + auto command_pool_ret = gfxrecon::test::create_command_pool(init.disp, queue_family_index); if (!command_pool_ret) { std::cout << command_pool_ret.error().message() << "\n"; @@ -639,3 +631,13 @@ int main(int argc, char *argv[]) { cleanup(init, render_data); return 0; } + +GFXRECON_END_NAMESPACE(triangle) + +GFXRECON_END_NAMESPACE(test_app) + +GFXRECON_END_NAMESPACE(gfxrecon) + +int main(int argc, char *argv[]) { + return gfxrecon::test_app::triangle::run(); +} From 731d6f60e8f42701e9f4ae76cb5c69a3e55cfe26 Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Mon, 21 Oct 2024 11:35:21 -0400 Subject: [PATCH 10/70] Load shader module from file --- test/test_apps/common/test_app_base.cpp | 39 +++++++++++++++++++ test/test_apps/common/test_app_base.h | 6 +++ test/test_apps/triangle/triangle.cpp | 50 ++++++------------------- 3 files changed, 56 insertions(+), 39 deletions(-) diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index 1245bed502..1c2d239db1 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -39,6 +39,7 @@ #include #include +#include GFXRECON_BEGIN_NAMESPACE(gfxrecon) @@ -2303,6 +2304,44 @@ Result create_sync_objects(Swapchain const& swapchain, DispatchTable const return sync; } +std::vector readFile(const std::string& filename) { + std::ifstream file(filename, std::ios::ate | std::ios::binary); + + if (!file.is_open()) { + throw std::runtime_error("failed to open file!"); + } + + size_t file_size = (size_t)file.tellg(); + std::vector buffer(file_size); + + file.seekg(0); + file.read(buffer.data(), static_cast(file_size)); + + file.close(); + + return buffer; +} + +Result createShaderModule(DispatchTable const& disp, const std::vector& code) { + VkShaderModuleCreateInfo create_info = {}; + create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; + create_info.codeSize = code.size(); + create_info.pCode = reinterpret_cast(code.data()); + + VkShaderModule shaderModule; + auto result = disp.createShaderModule(&create_info, nullptr, &shaderModule); + if (result != VK_SUCCESS) { + return Result{GeneralError::unexpected, result}; + } + + return shaderModule; +} + +Result readShaderFromFile(DispatchTable const& disp, const std::string& filename) { + std::vector code = readFile(filename); + return createShaderModule(disp, code); +} + GFXRECON_END_NAMESPACE(test) GFXRECON_END_NAMESPACE(gfxrecon) diff --git a/test/test_apps/common/test_app_base.h b/test/test_apps/common/test_app_base.h index 0411d2c911..c93ef10bed 100644 --- a/test/test_apps/common/test_app_base.h +++ b/test/test_apps/common/test_app_base.h @@ -1047,6 +1047,12 @@ struct Sync { Result create_sync_objects(Swapchain const& swapchain, DispatchTable const& disp, const int max_frames_in_flight); +std::vector readFile(const std::string& filename); + +Result createShaderModule(DispatchTable const& disp, const std::vector& code); + +Result readShaderFromFile(DispatchTable const& disp, const std::string& filename); + GFXRECON_END_NAMESPACE(test) GFXRECON_END_NAMESPACE(gfxrecon) diff --git a/test/test_apps/triangle/triangle.cpp b/test/test_apps/triangle/triangle.cpp index a4c4b40cdb..814d5cee13 100644 --- a/test/test_apps/triangle/triangle.cpp +++ b/test/test_apps/triangle/triangle.cpp @@ -164,48 +164,20 @@ int create_render_pass(Init& init, RenderData& data) { return 0; } -std::vector readFile(const std::string& filename) { - std::ifstream file(filename, std::ios::ate | 
std::ios::binary); - - if (!file.is_open()) { - throw std::runtime_error("failed to open file!"); - } - - size_t file_size = (size_t)file.tellg(); - std::vector buffer(file_size); - - file.seekg(0); - file.read(buffer.data(), static_cast(file_size)); - - file.close(); - - return buffer; -} - -VkShaderModule createShaderModule(Init& init, const std::vector& code) { - VkShaderModuleCreateInfo create_info = {}; - create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; - create_info.codeSize = code.size(); - create_info.pCode = reinterpret_cast(code.data()); - - VkShaderModule shaderModule; - if (init.disp.createShaderModule(&create_info, nullptr, &shaderModule) != VK_SUCCESS) { - return VK_NULL_HANDLE; // failed to create shader module +int create_graphics_pipeline(Init& init, RenderData& data) { + auto vert_module_ret = gfxrecon::test::readShaderFromFile(init.disp, "vert.spv"); + if (!vert_module_ret) { + std::cout << vert_module_ret.error().message() << "\n"; + return -1; } + auto vert_module = vert_module_ret.value(); - return shaderModule; -} - -int create_graphics_pipeline(Init& init, RenderData& data) { - auto vert_code = readFile("vert.spv"); - auto frag_code = readFile("frag.spv"); - - VkShaderModule vert_module = createShaderModule(init, vert_code); - VkShaderModule frag_module = createShaderModule(init, frag_code); - if (vert_module == VK_NULL_HANDLE || frag_module == VK_NULL_HANDLE) { - std::cout << "failed to create shader module\n"; - return -1; // failed to create shader modules + auto frag_module_ret = gfxrecon::test::readShaderFromFile(init.disp, "frag.spv"); + if (!frag_module_ret) { + std::cout << frag_module_ret.error().message() << "\n"; + return -1; } + auto frag_module = frag_module_ret.value(); VkPipelineShaderStageCreateInfo vert_stage_info = {}; vert_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; From f04f20b5760dbb83381f7d8107bf2b8b61de4eaa Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Mon, 21 Oct 2024 14:17:44 -0400 Subject: [PATCH 11/70] Refactor errors --- .../vk_enum_string_helper.h | 9617 +++++++++++++++++ test/test_apps/common/test_app_base.cpp | 467 +- test/test_apps/common/test_app_base.h | 211 +- test/test_apps/triangle/triangle.cpp | 231 +- 4 files changed, 9968 insertions(+), 558 deletions(-) create mode 100644 external/Vulkan-Utility-Libraries/vk_enum_string_helper.h diff --git a/external/Vulkan-Utility-Libraries/vk_enum_string_helper.h b/external/Vulkan-Utility-Libraries/vk_enum_string_helper.h new file mode 100644 index 0000000000..eebd7468a2 --- /dev/null +++ b/external/Vulkan-Utility-Libraries/vk_enum_string_helper.h @@ -0,0 +1,9617 @@ +// *** THIS FILE IS GENERATED - DO NOT EDIT *** +// See enum_string_helper_generator.py for modifications +// Copyright 2023 The Khronos Group Inc. +// Copyright 2023 Valve Corporation +// Copyright 2023 LunarG, Inc. 
+// +// SPDX-License-Identifier: Apache-2.0 + +#pragma once + +// clang-format off + +#ifdef __cplusplus +#include +#endif +#include +static inline const char* string_VkResult(VkResult input_value) { + switch (input_value) { + case VK_SUCCESS: + return "VK_SUCCESS"; + case VK_NOT_READY: + return "VK_NOT_READY"; + case VK_TIMEOUT: + return "VK_TIMEOUT"; + case VK_EVENT_SET: + return "VK_EVENT_SET"; + case VK_EVENT_RESET: + return "VK_EVENT_RESET"; + case VK_INCOMPLETE: + return "VK_INCOMPLETE"; + case VK_ERROR_OUT_OF_HOST_MEMORY: + return "VK_ERROR_OUT_OF_HOST_MEMORY"; + case VK_ERROR_OUT_OF_DEVICE_MEMORY: + return "VK_ERROR_OUT_OF_DEVICE_MEMORY"; + case VK_ERROR_INITIALIZATION_FAILED: + return "VK_ERROR_INITIALIZATION_FAILED"; + case VK_ERROR_DEVICE_LOST: + return "VK_ERROR_DEVICE_LOST"; + case VK_ERROR_MEMORY_MAP_FAILED: + return "VK_ERROR_MEMORY_MAP_FAILED"; + case VK_ERROR_LAYER_NOT_PRESENT: + return "VK_ERROR_LAYER_NOT_PRESENT"; + case VK_ERROR_EXTENSION_NOT_PRESENT: + return "VK_ERROR_EXTENSION_NOT_PRESENT"; + case VK_ERROR_FEATURE_NOT_PRESENT: + return "VK_ERROR_FEATURE_NOT_PRESENT"; + case VK_ERROR_INCOMPATIBLE_DRIVER: + return "VK_ERROR_INCOMPATIBLE_DRIVER"; + case VK_ERROR_TOO_MANY_OBJECTS: + return "VK_ERROR_TOO_MANY_OBJECTS"; + case VK_ERROR_FORMAT_NOT_SUPPORTED: + return "VK_ERROR_FORMAT_NOT_SUPPORTED"; + case VK_ERROR_FRAGMENTED_POOL: + return "VK_ERROR_FRAGMENTED_POOL"; + case VK_ERROR_UNKNOWN: + return "VK_ERROR_UNKNOWN"; + case VK_ERROR_OUT_OF_POOL_MEMORY: + return "VK_ERROR_OUT_OF_POOL_MEMORY"; + case VK_ERROR_INVALID_EXTERNAL_HANDLE: + return "VK_ERROR_INVALID_EXTERNAL_HANDLE"; + case VK_ERROR_FRAGMENTATION: + return "VK_ERROR_FRAGMENTATION"; + case VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS: + return "VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS"; + case VK_PIPELINE_COMPILE_REQUIRED: + return "VK_PIPELINE_COMPILE_REQUIRED"; + case VK_ERROR_SURFACE_LOST_KHR: + return "VK_ERROR_SURFACE_LOST_KHR"; + case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR: + return "VK_ERROR_NATIVE_WINDOW_IN_USE_KHR"; + case VK_SUBOPTIMAL_KHR: + return "VK_SUBOPTIMAL_KHR"; + case VK_ERROR_OUT_OF_DATE_KHR: + return "VK_ERROR_OUT_OF_DATE_KHR"; + case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR: + return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR"; + case VK_ERROR_VALIDATION_FAILED_EXT: + return "VK_ERROR_VALIDATION_FAILED_EXT"; + case VK_ERROR_INVALID_SHADER_NV: + return "VK_ERROR_INVALID_SHADER_NV"; + case VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR: + return "VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR"; + case VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR: + return "VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR"; + case VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR: + return "VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR"; + case VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR: + return "VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR"; + case VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR: + return "VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR"; + case VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR: + return "VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR"; + case VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT: + return "VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT"; + case VK_ERROR_NOT_PERMITTED_KHR: + return "VK_ERROR_NOT_PERMITTED_KHR"; + case VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT: + return "VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT"; + case VK_THREAD_IDLE_KHR: + return "VK_THREAD_IDLE_KHR"; + case VK_THREAD_DONE_KHR: + return "VK_THREAD_DONE_KHR"; + case VK_OPERATION_DEFERRED_KHR: + return 
"VK_OPERATION_DEFERRED_KHR"; + case VK_OPERATION_NOT_DEFERRED_KHR: + return "VK_OPERATION_NOT_DEFERRED_KHR"; + case VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR: + return "VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR"; + case VK_ERROR_COMPRESSION_EXHAUSTED_EXT: + return "VK_ERROR_COMPRESSION_EXHAUSTED_EXT"; + case VK_INCOMPATIBLE_SHADER_BINARY_EXT: + return "VK_INCOMPATIBLE_SHADER_BINARY_EXT"; + case VK_PIPELINE_BINARY_MISSING_KHR: + return "VK_PIPELINE_BINARY_MISSING_KHR"; + case VK_ERROR_NOT_ENOUGH_SPACE_KHR: + return "VK_ERROR_NOT_ENOUGH_SPACE_KHR"; + default: + return "Unhandled VkResult"; + } +} +static inline const char* string_VkStructureType(VkStructureType input_value) { + switch (input_value) { + case VK_STRUCTURE_TYPE_APPLICATION_INFO: + return "VK_STRUCTURE_TYPE_APPLICATION_INFO"; + case VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_SUBMIT_INFO: + return "VK_STRUCTURE_TYPE_SUBMIT_INFO"; + case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO: + return "VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO"; + case VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE: + return "VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE"; + case VK_STRUCTURE_TYPE_BIND_SPARSE_INFO: + return "VK_STRUCTURE_TYPE_BIND_SPARSE_INFO"; + case VK_STRUCTURE_TYPE_FENCE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_FENCE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_EVENT_CREATE_INFO: + return "VK_STRUCTURE_TYPE_EVENT_CREATE_INFO"; + case VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO: + return "VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO"; + case VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO: + return "VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO"; + case VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO: + return "VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO"; + case VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO: + return "VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO"; + case VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO"; + case 
VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO"; + case VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO: + return "VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO"; + case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET: + return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET"; + case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET: + return "VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET"; + case VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO: + return "VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO"; + case VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO: + return "VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO"; + case VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO: + return "VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO"; + case VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO: + return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO"; + case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO: + return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO"; + case VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO: + return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO"; + case VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO: + return "VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO"; + case VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER: + return "VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER"; + case VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER: + return "VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER"; + case VK_STRUCTURE_TYPE_MEMORY_BARRIER: + return "VK_STRUCTURE_TYPE_MEMORY_BARRIER"; + case VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES"; + case VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO: + return "VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO"; + case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO: + return "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES"; + case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS: + return "VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS"; + case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO: + return "VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO"; + case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO: + return "VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO"; + case VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO: + return "VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO"; + case VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO: + return 
"VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO"; + case VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO: + return "VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO"; + case VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO: + return "VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO"; + case VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO: + return "VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO"; + case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO: + return "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES"; + case VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2: + return "VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2"; + case VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2: + return "VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2"; + case VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2: + return "VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2"; + case VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2: + return "VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2"; + case VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2: + return "VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2"; + case VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2: + return "VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2"; + case VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2: + return "VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2"; + case VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2: + return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2"; + case VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2: + return "VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES"; + case VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO: + return "VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO"; + case VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO: + return "VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES"; + case 
VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO: + return "VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES"; + case VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2: + return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2"; + case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO: + return "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO"; + case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO: + return "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO"; + case VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO: + return "VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO"; + case VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO: + return "VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES"; + case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES: + return "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO"; + case VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES: + return "VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO"; + case VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES: + return "VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES"; + case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO: + return "VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO"; + case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO: + return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO"; + case VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES: + return "VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES"; + case VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO"; + case VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES: + return "VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES"; + case 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES"; + case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO: + return "VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO"; + case VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2: + return "VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2"; + case VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2: + return "VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2"; + case VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2: + return "VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2"; + case VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2: + return "VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2"; + case VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2: + return "VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2"; + case VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO: + return "VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO"; + case VK_STRUCTURE_TYPE_SUBPASS_END_INFO: + return "VK_STRUCTURE_TYPE_SUBPASS_END_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES"; + case VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE: + return "VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES"; + case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES"; + case 
VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES"; + case VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO: + return "VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO"; + case VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO: + return "VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO"; + case VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO: + return "VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES"; + case VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT: + return "VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT"; + case VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT: + return "VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES"; + case VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO: + return "VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO"; + case VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO: + return "VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO"; + case VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO: + return "VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES"; + case VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO: + return "VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO"; + case VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO: + return "VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO"; + case VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO: + return "VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO"; + case VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO: + return "VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES"; + case VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO"; + case 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES"; + case VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO: + return "VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES"; + case VK_STRUCTURE_TYPE_MEMORY_BARRIER_2: + return "VK_STRUCTURE_TYPE_MEMORY_BARRIER_2"; + case VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2: + return "VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2"; + case VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2: + return "VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2"; + case VK_STRUCTURE_TYPE_DEPENDENCY_INFO: + return "VK_STRUCTURE_TYPE_DEPENDENCY_INFO"; + case VK_STRUCTURE_TYPE_SUBMIT_INFO_2: + return "VK_STRUCTURE_TYPE_SUBMIT_INFO_2"; + case VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO: + return "VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO"; + case VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO: + return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES"; + case VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2: + return "VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2"; + case VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2: + return "VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2"; + case VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2: + return "VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2"; + case VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2: + return "VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2"; + case VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2: + return "VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2"; + case VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2: + return "VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2"; + case VK_STRUCTURE_TYPE_BUFFER_COPY_2: + return "VK_STRUCTURE_TYPE_BUFFER_COPY_2"; + case VK_STRUCTURE_TYPE_IMAGE_COPY_2: + return "VK_STRUCTURE_TYPE_IMAGE_COPY_2"; + case VK_STRUCTURE_TYPE_IMAGE_BLIT_2: + return "VK_STRUCTURE_TYPE_IMAGE_BLIT_2"; + case VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2: + return "VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2"; + case VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2: + return "VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES"; + case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES: + 
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES"; + case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK: + return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES"; + case VK_STRUCTURE_TYPE_RENDERING_INFO: + return "VK_STRUCTURE_TYPE_RENDERING_INFO"; + case VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO: + return "VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO"; + case VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO: + return "VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES"; + case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO: + return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES"; + case VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3: + return "VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES"; + case VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS: + return "VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS"; + case VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS: + return "VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS"; + case VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PRESENT_INFO_KHR: + return "VK_STRUCTURE_TYPE_PRESENT_INFO_KHR"; + case VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR: + return "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR"; + case VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR: + return "VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR"; + case VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR: + return "VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR"; + case VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR: + return 
"VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR: + return "VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR"; + case VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD: + return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD"; + case VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT"; + case VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT"; + case VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT"; + case VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR"; + case VK_STRUCTURE_TYPE_BIND_VIDEO_SESSION_MEMORY_INFO_KHR: + return "VK_STRUCTURE_TYPE_BIND_VIDEO_SESSION_MEMORY_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_INFO_KHR"; + case VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_PROFILE_LIST_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_PROFILE_LIST_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR: + return 
"VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_USAGE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_USAGE_INFO_KHR"; + case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV: + return "VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX: + return "VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX"; + case VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX: + return "VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX"; + case VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX: + return "VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX"; + case VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX: + return "VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX"; + case VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX: + return "VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PICTURE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PICTURE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_GOP_REMAINING_FRAME_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_GOP_REMAINING_FRAME_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_QUALITY_LEVEL_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_QUALITY_LEVEL_PROPERTIES_KHR"; + case 
VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_GET_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_GET_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_FEEDBACK_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_FEEDBACK_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PICTURE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PICTURE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_GOP_REMAINING_FRAME_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_GOP_REMAINING_FRAME_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_QUALITY_LEVEL_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_QUALITY_LEVEL_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_GET_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_GET_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_FEEDBACK_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_FEEDBACK_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR"; + case VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD: + return "VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD"; + case VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP: + return "VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV: + return 
"VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV"; + case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV: + return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV"; + case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV: + return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV"; + case VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV: + return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV"; + case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV: + return "VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV"; + case VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT: + return "VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT"; + case VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN: + return "VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN"; + case VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT: + return "VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR: + return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR"; + case VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR: + return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR"; + case VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR: + return "VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR"; + case VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR: + return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR"; + case VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR: + return "VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR"; + case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR: + return "VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR"; + case VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR: + return "VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR"; + case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR: + return "VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR"; + case VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR: + return "VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR"; + case VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR: + return "VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR"; + case VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR: + return "VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR"; + case VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR: + return "VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT: + return 
"VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT: + return "VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT"; + case VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR: + return "VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT: + return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT"; + case VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT: + return "VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT"; + case VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT"; + case VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT: + return "VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT"; + case VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE: + return "VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX"; + case VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX: + return "VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX"; + case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_HDR_METADATA_EXT: + return "VK_STRUCTURE_TYPE_HDR_METADATA_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RELAXED_LINE_RASTERIZATION_FEATURES_IMG: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RELAXED_LINE_RASTERIZATION_FEATURES_IMG"; + case VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR: + return "VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR"; + case VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR: + return "VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR"; + case VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR: + return "VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR"; + case VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR: + return 
"VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR"; + case VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR: + return "VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR: + return "VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR"; + case VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR: + return "VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR"; + case VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR: + return "VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR"; + case VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR: + return "VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR"; + case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR: + return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR"; + case VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR: + return "VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR"; + case VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR: + return "VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR"; + case VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR: + return "VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR"; + case VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR: + return "VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR"; + case VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR: + return "VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR"; + case VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR: + return "VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR"; + case VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK: + return "VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK"; + case VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK: + return "VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK"; + case VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT"; + case VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT"; + case VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT: + return "VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT"; + case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT: + return "VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT"; + case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID: + return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID"; + case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID: + return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID"; + case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID: + return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID"; + case VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID: + return "VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID"; + case VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID: + return "VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID"; + case 
VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID: + return "VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID"; + case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID: + return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_PROPERTIES_AMDX: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_PROPERTIES_AMDX"; + case VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX: + return "VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX"; + case VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX: + return "VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX"; + case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX: + return "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX"; +#endif // VK_ENABLE_BETA_EXTENSIONS + case VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD: + return "VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD"; + case VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT: + return "VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT"; + case VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT: + return "VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR: + return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR"; + case 
VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR"; + case VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR: + return "VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR"; + case VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR: + return "VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR"; + case VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR: + return "VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT: + return "VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT"; + case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT: + return "VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT"; + case VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT"; + case 
VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR"; +#endif // VK_ENABLE_BETA_EXTENSIONS + case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_GEOMETRY_NV: + return "VK_STRUCTURE_TYPE_GEOMETRY_NV"; + case VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV: + return "VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV"; + case VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV: + return "VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV"; + case VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV: + return "VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV"; + case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV: + return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT"; + case VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT"; + case VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT: + return 
"VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD: + return "VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR"; + case VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD: + return "VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP: + return "VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV"; + case VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV: + return "VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV"; + case VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV: + return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV"; + case VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV: + return "VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL: + return 
"VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL"; + case VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL: + return "VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL"; + case VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL: + return "VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL"; + case VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL: + return "VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL"; + case VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL: + return "VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL"; + case VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL: + return "VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL"; + case VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL: + return "VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD: + return "VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD"; + case VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD: + return "VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD"; + case VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT: + return "VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT"; + case VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR: + return "VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR"; + case VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR: + return "VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR: + return 
"VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_LOCATION_INFO_KHR: + return "VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_LOCATION_INFO_KHR"; + case VK_STRUCTURE_TYPE_RENDERING_INPUT_ATTACHMENT_INDEX_INFO_KHR: + return "VK_STRUCTURE_TYPE_RENDERING_INPUT_ATTACHMENT_INDEX_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_QUAD_CONTROL_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_QUAD_CONTROL_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV"; + case VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV: + return "VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT: + return 
"VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT: + return "VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT"; + case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT: + return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT"; + case VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT: + return "VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT"; + case VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT: + return "VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT"; + case VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY_EXT: + return "VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY_EXT"; + case VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO_EXT: + return "VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO_EXT"; + case VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT: + return "VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT"; + case VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT: + return "VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT"; + case VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO_EXT: + return "VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO_EXT"; + case VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE_EXT: + return "VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE_EXT"; + case VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT: + return "VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT"; + case VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR: + return "VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR"; + case VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR: + return "VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_MEMORY_MAP_PLACED_INFO_EXT: + return 
"VK_STRUCTURE_TYPE_MEMORY_MAP_PLACED_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT: + return "VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT"; + case VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT: + return "VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT"; + case VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_COMPATIBILITY_EXT: + return "VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_COMPATIBILITY_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_FENCE_INFO_EXT: + return "VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_FENCE_INFO_EXT"; + case VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODE_INFO_EXT: + return "VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODE_INFO_EXT"; + case VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_RELEASE_SWAPCHAIN_IMAGES_INFO_EXT: + return "VK_STRUCTURE_TYPE_RELEASE_SWAPCHAIN_IMAGES_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV: + return "VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV"; + case VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV: + return "VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV"; + case VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV: + return "VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV"; + case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV: + return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM: + return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM"; + case VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM: + return "VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_BIAS_CONTROL_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_BIAS_CONTROL_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_DEPTH_BIAS_INFO_EXT: + return 
"VK_STRUCTURE_TYPE_DEPTH_BIAS_INFO_EXT"; + case VK_STRUCTURE_TYPE_DEPTH_BIAS_REPRESENTATION_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEPTH_BIAS_REPRESENTATION_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT: + return "VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV"; + case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_BARRIER_NV: + return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_BARRIER_NV"; + case VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_PRESENT_ID_KHR: + return "VK_STRUCTURE_TYPE_PRESENT_ID_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_USAGE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_USAGE_INFO_KHR"; + case VK_STRUCTURE_TYPE_QUERY_POOL_VIDEO_ENCODE_FEEDBACK_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_QUERY_POOL_VIDEO_ENCODE_FEEDBACK_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUALITY_LEVEL_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUALITY_LEVEL_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_GET_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_GET_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_FEEDBACK_INFO_KHR: + return 
"VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_FEEDBACK_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV"; + case VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_CUDA_MODULE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_CUDA_MODULE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_CUDA_FUNCTION_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_CUDA_FUNCTION_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_CUDA_LAUNCH_INFO_NV: + return "VK_STRUCTURE_TYPE_CUDA_LAUNCH_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_QUERY_LOW_LATENCY_SUPPORT_NV: + return "VK_STRUCTURE_TYPE_QUERY_LOW_LATENCY_SUPPORT_NV"; + case VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECTS_INFO_EXT: + return "VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECTS_INFO_EXT"; + case VK_STRUCTURE_TYPE_EXPORT_METAL_DEVICE_INFO_EXT: + return "VK_STRUCTURE_TYPE_EXPORT_METAL_DEVICE_INFO_EXT"; + case VK_STRUCTURE_TYPE_EXPORT_METAL_COMMAND_QUEUE_INFO_EXT: + return "VK_STRUCTURE_TYPE_EXPORT_METAL_COMMAND_QUEUE_INFO_EXT"; + case VK_STRUCTURE_TYPE_EXPORT_METAL_BUFFER_INFO_EXT: + return "VK_STRUCTURE_TYPE_EXPORT_METAL_BUFFER_INFO_EXT"; + case VK_STRUCTURE_TYPE_IMPORT_METAL_BUFFER_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMPORT_METAL_BUFFER_INFO_EXT"; + case VK_STRUCTURE_TYPE_EXPORT_METAL_TEXTURE_INFO_EXT: + return "VK_STRUCTURE_TYPE_EXPORT_METAL_TEXTURE_INFO_EXT"; + case VK_STRUCTURE_TYPE_IMPORT_METAL_TEXTURE_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMPORT_METAL_TEXTURE_INFO_EXT"; + case VK_STRUCTURE_TYPE_EXPORT_METAL_IO_SURFACE_INFO_EXT: + return "VK_STRUCTURE_TYPE_EXPORT_METAL_IO_SURFACE_INFO_EXT"; + case VK_STRUCTURE_TYPE_IMPORT_METAL_IO_SURFACE_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMPORT_METAL_IO_SURFACE_INFO_EXT"; + case VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT: + return "VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT"; + case VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_ADDRESS_INFO_EXT: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_ADDRESS_INFO_EXT"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT"; + case VK_STRUCTURE_TYPE_BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT: + return "VK_STRUCTURE_TYPE_BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT"; + case VK_STRUCTURE_TYPE_IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT"; + case 
VK_STRUCTURE_TYPE_IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT"; + case VK_STRUCTURE_TYPE_SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT: + return "VK_STRUCTURE_TYPE_SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT"; + case VK_STRUCTURE_TYPE_OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_INFO_EXT: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_INFO_EXT"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT"; + case 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM: + return "VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT: + return "VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT"; + case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT: + return "VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT"; + case VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT: + return "VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT: + return "VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT"; + case VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT: + return "VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT: + return "VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT"; + //case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_MODE_FIFO_LATEST_READY_FEATURES_EXT: + //return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_MODE_FIFO_LATEST_READY_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA: + return 
"VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA: + return "VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA"; + case VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CREATE_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CREATE_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA: + return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA"; + case VK_STRUCTURE_TYPE_BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_BUFFER_COLLECTION_PROPERTIES_FUCHSIA: + return "VK_STRUCTURE_TYPE_BUFFER_COLLECTION_PROPERTIES_FUCHSIA"; + case VK_STRUCTURE_TYPE_BUFFER_CONSTRAINTS_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_BUFFER_CONSTRAINTS_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_IMAGE_CONSTRAINTS_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_IMAGE_CONSTRAINTS_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_SYSMEM_COLOR_SPACE_FUCHSIA: + return "VK_STRUCTURE_TYPE_SYSMEM_COLOR_SPACE_FUCHSIA"; + case VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA: + return "VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA"; + case VK_STRUCTURE_TYPE_SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI: + return "VK_STRUCTURE_TYPE_SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI"; + case VK_STRUCTURE_TYPE_MEMORY_GET_REMOTE_ADDRESS_INFO_NV: + return "VK_STRUCTURE_TYPE_MEMORY_GET_REMOTE_ADDRESS_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PIPELINE_PROPERTIES_IDENTIFIER_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_PROPERTIES_IDENTIFIER_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAME_BOUNDARY_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAME_BOUNDARY_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_FRAME_BOUNDARY_EXT: + return "VK_STRUCTURE_TYPE_FRAME_BOUNDARY_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT"; + case 
VK_STRUCTURE_TYPE_SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT: + return "VK_STRUCTURE_TYPE_SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT"; + case VK_STRUCTURE_TYPE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT: + return "VK_STRUCTURE_TYPE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX: + return "VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT: + return "VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT"; + case VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT: + return "VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT"; + case VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT: + return "VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT"; + case VK_STRUCTURE_TYPE_COPY_MICROMAP_TO_MEMORY_INFO_EXT: + return "VK_STRUCTURE_TYPE_COPY_MICROMAP_TO_MEMORY_INFO_EXT"; + case VK_STRUCTURE_TYPE_COPY_MEMORY_TO_MICROMAP_INFO_EXT: + return "VK_STRUCTURE_TYPE_COPY_MEMORY_TO_MICROMAP_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT: + return "VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_FEATURES_NV: + return 
"VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_DISPLACEMENT_MICROMAP_NV: + return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_DISPLACEMENT_MICROMAP_NV"; +#endif // VK_ENABLE_BETA_EXTENSIONS + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_FEATURES_HUAWEI: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_FEATURES_HUAWEI"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_PROPERTIES_HUAWEI: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_PROPERTIES_HUAWEI"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_VRS_FEATURES_HUAWEI: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_VRS_FEATURES_HUAWEI"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM: + return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_SLICED_VIEW_OF_3D_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_SLICED_VIEW_OF_3D_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_IMAGE_VIEW_SLICED_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_IMAGE_VIEW_SLICED_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE"; + case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE: + return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_FEATURES_ARM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_FEATURES_ARM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_PROPERTIES_ARM: + return 
"VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_PROPERTIES_ARM"; + case VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_BEGIN_INFO_ARM: + return "VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_BEGIN_INFO_ARM"; + case VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_INFO_ARM: + return "VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_INFO_ARM"; + case VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_SUBMIT_INFO_ARM: + return "VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_SUBMIT_INFO_ARM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM"; + case VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM: + return "VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV"; + case VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV: + return "VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV"; + case VK_STRUCTURE_TYPE_PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV: + return "VK_STRUCTURE_TYPE_PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MAXIMAL_RECONVERGENCE_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MAXIMAL_RECONVERGENCE_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM"; + case VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM: + return "VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXT: + return "VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT: + return 
"VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT: + return "VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT"; + case VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_INFO_LUNARG: + return "VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_INFO_LUNARG"; + case VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG: + return "VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT: + return "VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV: + return "VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV"; + case VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV: + return "VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV"; + case VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV: + return "VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_FEATURES_ANDROID: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_FEATURES_ANDROID"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_PROPERTIES_ANDROID: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_PROPERTIES_ANDROID"; + case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_RESOLVE_PROPERTIES_ANDROID: + 
return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_RESOLVE_PROPERTIES_ANDROID"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_RENDERING_AREA_INFO_KHR: + return "VK_STRUCTURE_TYPE_RENDERING_AREA_INFO_KHR"; + case VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO_KHR: + return "VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO_KHR"; + case VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR: + return "VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR"; + case VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR: + return "VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ANTI_LAG_FEATURES_AMD: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ANTI_LAG_FEATURES_AMD"; + case VK_STRUCTURE_TYPE_ANTI_LAG_DATA_AMD: + return "VK_STRUCTURE_TYPE_ANTI_LAG_DATA_AMD"; + case VK_STRUCTURE_TYPE_ANTI_LAG_PRESENTATION_INFO_AMD: + return "VK_STRUCTURE_TYPE_ANTI_LAG_PRESENTATION_INFO_AMD"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_BINARY_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_BINARY_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_BINARY_INFO_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_BINARY_INFO_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_BINARY_KEY_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_BINARY_KEY_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_RELEASE_CAPTURED_PIPELINE_DATA_INFO_KHR: + return "VK_STRUCTURE_TYPE_RELEASE_CAPTURED_PIPELINE_DATA_INFO_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_BINARY_DATA_INFO_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_BINARY_DATA_INFO_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_DEVICE_PIPELINE_BINARY_INTERNAL_CACHE_CONTROL_KHR: + return "VK_STRUCTURE_TYPE_DEVICE_PIPELINE_BINARY_INTERNAL_CACHE_CONTROL_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_BINARY_HANDLES_INFO_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_BINARY_HANDLES_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM"; + case VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM: + return "VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM"; + case 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC"; + case VK_STRUCTURE_TYPE_AMIGO_PROFILING_SUBMIT_INFO_SEC: + return "VK_STRUCTURE_TYPE_AMIGO_PROFILING_SUBMIT_INFO_SEC"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_PROPERTIES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_PROPERTIES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_LIBRARY_GROUP_HANDLES_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_LIBRARY_GROUP_HANDLES_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_LATENCY_SLEEP_MODE_INFO_NV: + return "VK_STRUCTURE_TYPE_LATENCY_SLEEP_MODE_INFO_NV"; + case VK_STRUCTURE_TYPE_LATENCY_SLEEP_INFO_NV: + return "VK_STRUCTURE_TYPE_LATENCY_SLEEP_INFO_NV"; + case VK_STRUCTURE_TYPE_SET_LATENCY_MARKER_INFO_NV: + return "VK_STRUCTURE_TYPE_SET_LATENCY_MARKER_INFO_NV"; + case VK_STRUCTURE_TYPE_GET_LATENCY_MARKER_INFO_NV: + return "VK_STRUCTURE_TYPE_GET_LATENCY_MARKER_INFO_NV"; + case VK_STRUCTURE_TYPE_LATENCY_TIMINGS_FRAME_REPORT_NV: + return "VK_STRUCTURE_TYPE_LATENCY_TIMINGS_FRAME_REPORT_NV"; + case VK_STRUCTURE_TYPE_LATENCY_SUBMISSION_PRESENT_ID_NV: + return "VK_STRUCTURE_TYPE_LATENCY_SUBMISSION_PRESENT_ID_NV"; + case VK_STRUCTURE_TYPE_OUT_OF_BAND_QUEUE_TYPE_INFO_NV: + return "VK_STRUCTURE_TYPE_OUT_OF_BAND_QUEUE_TYPE_INFO_NV"; + case VK_STRUCTURE_TYPE_SWAPCHAIN_LATENCY_CREATE_INFO_NV: + return "VK_STRUCTURE_TYPE_SWAPCHAIN_LATENCY_CREATE_INFO_NV"; + case VK_STRUCTURE_TYPE_LATENCY_SURFACE_CAPABILITIES_NV: + return 
"VK_STRUCTURE_TYPE_LATENCY_SURFACE_CAPABILITIES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_RENDER_AREAS_FEATURES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_RENDER_AREAS_FEATURES_QCOM"; + case VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_RENDER_AREAS_RENDER_PASS_BEGIN_INFO_QCOM: + return "VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_RENDER_AREAS_RENDER_PASS_BEGIN_INFO_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_CAPABILITIES_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_CAPABILITIES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PICTURE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PICTURE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PROFILE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PROFILE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_DPB_SLOT_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_DPB_SLOT_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_1_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_1_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_VIDEO_INLINE_QUERY_INFO_KHR: + return "VK_STRUCTURE_TYPE_VIDEO_INLINE_QUERY_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PER_STAGE_DESCRIPTOR_SET_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PER_STAGE_DESCRIPTOR_SET_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_2_FEATURES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_2_FEATURES_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_2_PROPERTIES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_2_PROPERTIES_QCOM"; + case VK_STRUCTURE_TYPE_SAMPLER_BLOCK_MATCH_WINDOW_CREATE_INFO_QCOM: + return "VK_STRUCTURE_TYPE_SAMPLER_BLOCK_MATCH_WINDOW_CREATE_INFO_QCOM"; + case VK_STRUCTURE_TYPE_SAMPLER_CUBIC_WEIGHTS_CREATE_INFO_QCOM: + return "VK_STRUCTURE_TYPE_SAMPLER_CUBIC_WEIGHTS_CREATE_INFO_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_WEIGHTS_FEATURES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_WEIGHTS_FEATURES_QCOM"; + case VK_STRUCTURE_TYPE_BLIT_IMAGE_CUBIC_WEIGHTS_INFO_QCOM: + return "VK_STRUCTURE_TYPE_BLIT_IMAGE_CUBIC_WEIGHTS_INFO_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_DEGAMMA_FEATURES_QCOM: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_DEGAMMA_FEATURES_QCOM"; + case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_YCBCR_DEGAMMA_CREATE_INFO_QCOM: + return "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_YCBCR_DEGAMMA_CREATE_INFO_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_CLAMP_FEATURES_QCOM: + return 
"VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_CLAMP_FEATURES_QCOM"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_SCREEN_BUFFER_PROPERTIES_QNX: + return "VK_STRUCTURE_TYPE_SCREEN_BUFFER_PROPERTIES_QNX"; + case VK_STRUCTURE_TYPE_SCREEN_BUFFER_FORMAT_PROPERTIES_QNX: + return "VK_STRUCTURE_TYPE_SCREEN_BUFFER_FORMAT_PROPERTIES_QNX"; + case VK_STRUCTURE_TYPE_IMPORT_SCREEN_BUFFER_INFO_QNX: + return "VK_STRUCTURE_TYPE_IMPORT_SCREEN_BUFFER_INFO_QNX"; + case VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_QNX: + return "VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_QNX"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_SCREEN_BUFFER_FEATURES_QNX: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_SCREEN_BUFFER_FEATURES_QNX"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_DRIVER_PROPERTIES_MSFT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_DRIVER_PROPERTIES_MSFT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR: + return "VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS_KHR: + return "VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS_KHR"; + case VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO_KHR: + return "VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO_KHR"; + case VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO_KHR: + return "VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO_KHR"; + case VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO_KHR: + return "VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO_KHR"; + case VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO_KHR: + return "VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO_KHR"; + case VK_STRUCTURE_TYPE_SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT: + return 
"VK_STRUCTURE_TYPE_SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT"; + case VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT: + return "VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAW_ACCESS_CHAINS_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAW_ACCESS_CHAINS_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_RELAXED_EXTENDED_INSTRUCTION_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_RELAXED_EXTENDED_INSTRUCTION_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMMAND_BUFFER_INHERITANCE_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMMAND_BUFFER_INHERITANCE_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_FEATURES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_FEATURES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_LIST_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_LIST_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_VULKAN_PROPERTIES_KHR: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_VULKAN_PROPERTIES_KHR"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_REPLICATED_COMPOSITES_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_REPLICATED_COMPOSITES_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_VALIDATION_FEATURES_NV: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_VALIDATION_FEATURES_NV"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_EXT"; + case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_EXT: + return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_EXT"; + case VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_EXT: + return "VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_EXT"; + case VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_EXT: + return "VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_EXT"; + case VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_EXT: + return "VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_EXT"; + case VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_EXT: + return "VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_EXT"; + case VK_STRUCTURE_TYPE_WRITE_INDIRECT_EXECUTION_SET_PIPELINE_EXT: + return "VK_STRUCTURE_TYPE_WRITE_INDIRECT_EXECUTION_SET_PIPELINE_EXT"; + case VK_STRUCTURE_TYPE_WRITE_INDIRECT_EXECUTION_SET_SHADER_EXT: + return "VK_STRUCTURE_TYPE_WRITE_INDIRECT_EXECUTION_SET_SHADER_EXT"; + case VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_PIPELINE_INFO_EXT: + return "VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_PIPELINE_INFO_EXT"; + case 
VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_SHADER_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_SHADER_INFO_EXT";
+        case VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_SHADER_LAYOUT_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_SHADER_LAYOUT_INFO_EXT";
+        case VK_STRUCTURE_TYPE_GENERATED_COMMANDS_PIPELINE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_GENERATED_COMMANDS_PIPELINE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_GENERATED_COMMANDS_SHADER_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_GENERATED_COMMANDS_SHADER_INFO_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_FEATURES_MESA:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_FEATURES_MESA";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_PROPERTIES_MESA:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_PROPERTIES_MESA";
+        case VK_STRUCTURE_TYPE_IMAGE_ALIGNMENT_CONTROL_CREATE_INFO_MESA:
+            return "VK_STRUCTURE_TYPE_IMAGE_ALIGNMENT_CONTROL_CREATE_INFO_MESA";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_CONTROL_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_CONTROL_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLAMP_CONTROL_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLAMP_CONTROL_CREATE_INFO_EXT";
+        default:
+            return "Unhandled VkStructureType";
+    }
+}
+static inline const char* string_VkPipelineCacheHeaderVersion(VkPipelineCacheHeaderVersion input_value) {
+    switch (input_value) {
+        case VK_PIPELINE_CACHE_HEADER_VERSION_ONE:
+            return "VK_PIPELINE_CACHE_HEADER_VERSION_ONE";
+        default:
+            return "Unhandled VkPipelineCacheHeaderVersion";
+    }
+}
+static inline const char* string_VkImageLayout(VkImageLayout input_value) {
+    switch (input_value) {
+        case VK_IMAGE_LAYOUT_UNDEFINED:
+            return "VK_IMAGE_LAYOUT_UNDEFINED";
+        case VK_IMAGE_LAYOUT_GENERAL:
+            return "VK_IMAGE_LAYOUT_GENERAL";
+        case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
+            return "VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL";
+        case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
+            return "VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL";
+        case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
+            return "VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL";
+        case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
+            return "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL";
+        case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
+            return "VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL";
+        case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
+            return "VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL";
+        case VK_IMAGE_LAYOUT_PREINITIALIZED:
+            return "VK_IMAGE_LAYOUT_PREINITIALIZED";
+        case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL:
+            return "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL";
+        case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL:
+            return "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL";
+        case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL:
+            return "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL";
+        case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL:
+            return "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL";
+        case VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL:
+            return "VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL";
+        case VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL:
+            return "VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL";
+        case VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL:
+            return "VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL";
+        case VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL:
+            return "VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL";
+        case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR:
+            return "VK_IMAGE_LAYOUT_PRESENT_SRC_KHR";
+        case VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR:
+            return "VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR";
+        case VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR:
+            return "VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR";
+        case VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR:
+            return "VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR";
+        case VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR:
+            return "VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR";
+        case VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT:
+            return "VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT";
+        case VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR:
+            return "VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR";
+        case VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ_KHR:
+            return "VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ_KHR";
+        case VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR:
+            return "VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR";
+        case VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR:
+            return "VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR";
+        case VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR:
+            return "VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR";
+        case VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT:
+            return "VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT";
+        default:
+            return "Unhandled VkImageLayout";
+    }
+}
+static inline const char* string_VkObjectType(VkObjectType input_value) {
+    switch (input_value) {
+        case VK_OBJECT_TYPE_UNKNOWN:
+            return "VK_OBJECT_TYPE_UNKNOWN";
+        case VK_OBJECT_TYPE_INSTANCE:
+            return "VK_OBJECT_TYPE_INSTANCE";
+        case VK_OBJECT_TYPE_PHYSICAL_DEVICE:
+            return "VK_OBJECT_TYPE_PHYSICAL_DEVICE";
+        case VK_OBJECT_TYPE_DEVICE:
+            return "VK_OBJECT_TYPE_DEVICE";
+        case VK_OBJECT_TYPE_QUEUE:
+            return "VK_OBJECT_TYPE_QUEUE";
+        case VK_OBJECT_TYPE_SEMAPHORE:
+            return "VK_OBJECT_TYPE_SEMAPHORE";
+        case VK_OBJECT_TYPE_COMMAND_BUFFER:
+            return "VK_OBJECT_TYPE_COMMAND_BUFFER";
+        case VK_OBJECT_TYPE_FENCE:
+            return "VK_OBJECT_TYPE_FENCE";
+        case VK_OBJECT_TYPE_DEVICE_MEMORY:
+            return "VK_OBJECT_TYPE_DEVICE_MEMORY";
+        case VK_OBJECT_TYPE_BUFFER:
+            return "VK_OBJECT_TYPE_BUFFER";
+        case VK_OBJECT_TYPE_IMAGE:
+            return "VK_OBJECT_TYPE_IMAGE";
+        case VK_OBJECT_TYPE_EVENT:
+            return "VK_OBJECT_TYPE_EVENT";
+        case VK_OBJECT_TYPE_QUERY_POOL:
+            return "VK_OBJECT_TYPE_QUERY_POOL";
+        case VK_OBJECT_TYPE_BUFFER_VIEW:
+            return "VK_OBJECT_TYPE_BUFFER_VIEW";
+        case VK_OBJECT_TYPE_IMAGE_VIEW:
+            return "VK_OBJECT_TYPE_IMAGE_VIEW";
+        case VK_OBJECT_TYPE_SHADER_MODULE:
+            return "VK_OBJECT_TYPE_SHADER_MODULE";
+        case VK_OBJECT_TYPE_PIPELINE_CACHE:
+            return "VK_OBJECT_TYPE_PIPELINE_CACHE";
+        case VK_OBJECT_TYPE_PIPELINE_LAYOUT:
+            return "VK_OBJECT_TYPE_PIPELINE_LAYOUT";
+        case VK_OBJECT_TYPE_RENDER_PASS:
+            return "VK_OBJECT_TYPE_RENDER_PASS";
+        case VK_OBJECT_TYPE_PIPELINE:
+            return "VK_OBJECT_TYPE_PIPELINE";
+        case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT:
+            return "VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT";
+        case VK_OBJECT_TYPE_SAMPLER:
+            return "VK_OBJECT_TYPE_SAMPLER";
+        case VK_OBJECT_TYPE_DESCRIPTOR_POOL:
+            return "VK_OBJECT_TYPE_DESCRIPTOR_POOL";
+        case VK_OBJECT_TYPE_DESCRIPTOR_SET:
+            return "VK_OBJECT_TYPE_DESCRIPTOR_SET";
+        case VK_OBJECT_TYPE_FRAMEBUFFER:
+            return "VK_OBJECT_TYPE_FRAMEBUFFER";
+        case VK_OBJECT_TYPE_COMMAND_POOL:
+            return "VK_OBJECT_TYPE_COMMAND_POOL";
+        case VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION:
+            return "VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION";
+        case VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE:
+            return "VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE";
+        case VK_OBJECT_TYPE_PRIVATE_DATA_SLOT:
+            return "VK_OBJECT_TYPE_PRIVATE_DATA_SLOT";
+        case VK_OBJECT_TYPE_SURFACE_KHR:
+            return "VK_OBJECT_TYPE_SURFACE_KHR";
+        case VK_OBJECT_TYPE_SWAPCHAIN_KHR:
+            return "VK_OBJECT_TYPE_SWAPCHAIN_KHR";
+        case VK_OBJECT_TYPE_DISPLAY_KHR:
+            return "VK_OBJECT_TYPE_DISPLAY_KHR";
+        case VK_OBJECT_TYPE_DISPLAY_MODE_KHR:
+            return "VK_OBJECT_TYPE_DISPLAY_MODE_KHR";
+        case VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT:
+            return "VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT";
+        case VK_OBJECT_TYPE_VIDEO_SESSION_KHR:
+            return "VK_OBJECT_TYPE_VIDEO_SESSION_KHR";
+        case VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR:
+            return "VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR";
+        case VK_OBJECT_TYPE_CU_MODULE_NVX:
+            return "VK_OBJECT_TYPE_CU_MODULE_NVX";
+        case VK_OBJECT_TYPE_CU_FUNCTION_NVX:
+            return "VK_OBJECT_TYPE_CU_FUNCTION_NVX";
+        case VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT:
+            return "VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT";
+        case VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR:
+            return "VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR";
+        case VK_OBJECT_TYPE_VALIDATION_CACHE_EXT:
+            return "VK_OBJECT_TYPE_VALIDATION_CACHE_EXT";
+        case VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV:
+            return "VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV";
+        case VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL:
+            return "VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL";
+        case VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR:
+            return "VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR";
+        case VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV:
+            return "VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV";
+        case VK_OBJECT_TYPE_CUDA_MODULE_NV:
+            return "VK_OBJECT_TYPE_CUDA_MODULE_NV";
+        case VK_OBJECT_TYPE_CUDA_FUNCTION_NV:
+            return "VK_OBJECT_TYPE_CUDA_FUNCTION_NV";
+        case VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA:
+            return "VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA";
+        case VK_OBJECT_TYPE_MICROMAP_EXT:
+            return "VK_OBJECT_TYPE_MICROMAP_EXT";
+        case VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV:
+            return "VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV";
+        case VK_OBJECT_TYPE_SHADER_EXT:
+            return "VK_OBJECT_TYPE_SHADER_EXT";
+        case VK_OBJECT_TYPE_PIPELINE_BINARY_KHR:
+            return "VK_OBJECT_TYPE_PIPELINE_BINARY_KHR";
+        case VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_EXT:
+            return "VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_EXT";
+        case VK_OBJECT_TYPE_INDIRECT_EXECUTION_SET_EXT:
+            return "VK_OBJECT_TYPE_INDIRECT_EXECUTION_SET_EXT";
+        default:
+            return "Unhandled VkObjectType";
+    }
+}
+static inline const char* string_VkVendorId(VkVendorId input_value) {
+    switch (input_value) {
+        case VK_VENDOR_ID_KHRONOS:
+            return "VK_VENDOR_ID_KHRONOS";
+        case VK_VENDOR_ID_VIV:
+            return "VK_VENDOR_ID_VIV";
+        case VK_VENDOR_ID_VSI:
+            return "VK_VENDOR_ID_VSI";
+        case VK_VENDOR_ID_KAZAN:
+            return "VK_VENDOR_ID_KAZAN";
+        case VK_VENDOR_ID_CODEPLAY:
+            return "VK_VENDOR_ID_CODEPLAY";
+        case VK_VENDOR_ID_MESA:
+            return "VK_VENDOR_ID_MESA";
+        case VK_VENDOR_ID_POCL:
+            return "VK_VENDOR_ID_POCL";
+        case VK_VENDOR_ID_MOBILEYE:
+            return "VK_VENDOR_ID_MOBILEYE";
+        default:
+            return "Unhandled VkVendorId";
+    }
+}
+static inline const char* string_VkSystemAllocationScope(VkSystemAllocationScope input_value) {
+    switch (input_value) {
+        case VK_SYSTEM_ALLOCATION_SCOPE_COMMAND:
+            return "VK_SYSTEM_ALLOCATION_SCOPE_COMMAND";
+        case VK_SYSTEM_ALLOCATION_SCOPE_OBJECT:
+            return "VK_SYSTEM_ALLOCATION_SCOPE_OBJECT";
+        case VK_SYSTEM_ALLOCATION_SCOPE_CACHE:
+            return "VK_SYSTEM_ALLOCATION_SCOPE_CACHE";
+        case VK_SYSTEM_ALLOCATION_SCOPE_DEVICE:
+            return "VK_SYSTEM_ALLOCATION_SCOPE_DEVICE";
+        case VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE:
+            return "VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE";
+        default:
+            return "Unhandled VkSystemAllocationScope";
+    }
+} +static inline const char* string_VkInternalAllocationType(VkInternalAllocationType input_value) { + switch (input_value) { + case VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE: + return "VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE"; + default: + return "Unhandled VkInternalAllocationType"; + } +} +static inline const char* string_VkFormat(VkFormat input_value) { + switch (input_value) { + case VK_FORMAT_UNDEFINED: + return "VK_FORMAT_UNDEFINED"; + case VK_FORMAT_R4G4_UNORM_PACK8: + return "VK_FORMAT_R4G4_UNORM_PACK8"; + case VK_FORMAT_R4G4B4A4_UNORM_PACK16: + return "VK_FORMAT_R4G4B4A4_UNORM_PACK16"; + case VK_FORMAT_B4G4R4A4_UNORM_PACK16: + return "VK_FORMAT_B4G4R4A4_UNORM_PACK16"; + case VK_FORMAT_R5G6B5_UNORM_PACK16: + return "VK_FORMAT_R5G6B5_UNORM_PACK16"; + case VK_FORMAT_B5G6R5_UNORM_PACK16: + return "VK_FORMAT_B5G6R5_UNORM_PACK16"; + case VK_FORMAT_R5G5B5A1_UNORM_PACK16: + return "VK_FORMAT_R5G5B5A1_UNORM_PACK16"; + case VK_FORMAT_B5G5R5A1_UNORM_PACK16: + return "VK_FORMAT_B5G5R5A1_UNORM_PACK16"; + case VK_FORMAT_A1R5G5B5_UNORM_PACK16: + return "VK_FORMAT_A1R5G5B5_UNORM_PACK16"; + case VK_FORMAT_R8_UNORM: + return "VK_FORMAT_R8_UNORM"; + case VK_FORMAT_R8_SNORM: + return "VK_FORMAT_R8_SNORM"; + case VK_FORMAT_R8_USCALED: + return "VK_FORMAT_R8_USCALED"; + case VK_FORMAT_R8_SSCALED: + return "VK_FORMAT_R8_SSCALED"; + case VK_FORMAT_R8_UINT: + return "VK_FORMAT_R8_UINT"; + case VK_FORMAT_R8_SINT: + return "VK_FORMAT_R8_SINT"; + case VK_FORMAT_R8_SRGB: + return "VK_FORMAT_R8_SRGB"; + case VK_FORMAT_R8G8_UNORM: + return "VK_FORMAT_R8G8_UNORM"; + case VK_FORMAT_R8G8_SNORM: + return "VK_FORMAT_R8G8_SNORM"; + case VK_FORMAT_R8G8_USCALED: + return "VK_FORMAT_R8G8_USCALED"; + case VK_FORMAT_R8G8_SSCALED: + return "VK_FORMAT_R8G8_SSCALED"; + case VK_FORMAT_R8G8_UINT: + return "VK_FORMAT_R8G8_UINT"; + case VK_FORMAT_R8G8_SINT: + return "VK_FORMAT_R8G8_SINT"; + case VK_FORMAT_R8G8_SRGB: + return "VK_FORMAT_R8G8_SRGB"; + case VK_FORMAT_R8G8B8_UNORM: + return "VK_FORMAT_R8G8B8_UNORM"; + case VK_FORMAT_R8G8B8_SNORM: + return "VK_FORMAT_R8G8B8_SNORM"; + case VK_FORMAT_R8G8B8_USCALED: + return "VK_FORMAT_R8G8B8_USCALED"; + case VK_FORMAT_R8G8B8_SSCALED: + return "VK_FORMAT_R8G8B8_SSCALED"; + case VK_FORMAT_R8G8B8_UINT: + return "VK_FORMAT_R8G8B8_UINT"; + case VK_FORMAT_R8G8B8_SINT: + return "VK_FORMAT_R8G8B8_SINT"; + case VK_FORMAT_R8G8B8_SRGB: + return "VK_FORMAT_R8G8B8_SRGB"; + case VK_FORMAT_B8G8R8_UNORM: + return "VK_FORMAT_B8G8R8_UNORM"; + case VK_FORMAT_B8G8R8_SNORM: + return "VK_FORMAT_B8G8R8_SNORM"; + case VK_FORMAT_B8G8R8_USCALED: + return "VK_FORMAT_B8G8R8_USCALED"; + case VK_FORMAT_B8G8R8_SSCALED: + return "VK_FORMAT_B8G8R8_SSCALED"; + case VK_FORMAT_B8G8R8_UINT: + return "VK_FORMAT_B8G8R8_UINT"; + case VK_FORMAT_B8G8R8_SINT: + return "VK_FORMAT_B8G8R8_SINT"; + case VK_FORMAT_B8G8R8_SRGB: + return "VK_FORMAT_B8G8R8_SRGB"; + case VK_FORMAT_R8G8B8A8_UNORM: + return "VK_FORMAT_R8G8B8A8_UNORM"; + case VK_FORMAT_R8G8B8A8_SNORM: + return "VK_FORMAT_R8G8B8A8_SNORM"; + case VK_FORMAT_R8G8B8A8_USCALED: + return "VK_FORMAT_R8G8B8A8_USCALED"; + case VK_FORMAT_R8G8B8A8_SSCALED: + return "VK_FORMAT_R8G8B8A8_SSCALED"; + case VK_FORMAT_R8G8B8A8_UINT: + return "VK_FORMAT_R8G8B8A8_UINT"; + case VK_FORMAT_R8G8B8A8_SINT: + return "VK_FORMAT_R8G8B8A8_SINT"; + case VK_FORMAT_R8G8B8A8_SRGB: + return "VK_FORMAT_R8G8B8A8_SRGB"; + case VK_FORMAT_B8G8R8A8_UNORM: + return "VK_FORMAT_B8G8R8A8_UNORM"; + case VK_FORMAT_B8G8R8A8_SNORM: + return "VK_FORMAT_B8G8R8A8_SNORM"; + case VK_FORMAT_B8G8R8A8_USCALED: + return 
"VK_FORMAT_B8G8R8A8_USCALED"; + case VK_FORMAT_B8G8R8A8_SSCALED: + return "VK_FORMAT_B8G8R8A8_SSCALED"; + case VK_FORMAT_B8G8R8A8_UINT: + return "VK_FORMAT_B8G8R8A8_UINT"; + case VK_FORMAT_B8G8R8A8_SINT: + return "VK_FORMAT_B8G8R8A8_SINT"; + case VK_FORMAT_B8G8R8A8_SRGB: + return "VK_FORMAT_B8G8R8A8_SRGB"; + case VK_FORMAT_A8B8G8R8_UNORM_PACK32: + return "VK_FORMAT_A8B8G8R8_UNORM_PACK32"; + case VK_FORMAT_A8B8G8R8_SNORM_PACK32: + return "VK_FORMAT_A8B8G8R8_SNORM_PACK32"; + case VK_FORMAT_A8B8G8R8_USCALED_PACK32: + return "VK_FORMAT_A8B8G8R8_USCALED_PACK32"; + case VK_FORMAT_A8B8G8R8_SSCALED_PACK32: + return "VK_FORMAT_A8B8G8R8_SSCALED_PACK32"; + case VK_FORMAT_A8B8G8R8_UINT_PACK32: + return "VK_FORMAT_A8B8G8R8_UINT_PACK32"; + case VK_FORMAT_A8B8G8R8_SINT_PACK32: + return "VK_FORMAT_A8B8G8R8_SINT_PACK32"; + case VK_FORMAT_A8B8G8R8_SRGB_PACK32: + return "VK_FORMAT_A8B8G8R8_SRGB_PACK32"; + case VK_FORMAT_A2R10G10B10_UNORM_PACK32: + return "VK_FORMAT_A2R10G10B10_UNORM_PACK32"; + case VK_FORMAT_A2R10G10B10_SNORM_PACK32: + return "VK_FORMAT_A2R10G10B10_SNORM_PACK32"; + case VK_FORMAT_A2R10G10B10_USCALED_PACK32: + return "VK_FORMAT_A2R10G10B10_USCALED_PACK32"; + case VK_FORMAT_A2R10G10B10_SSCALED_PACK32: + return "VK_FORMAT_A2R10G10B10_SSCALED_PACK32"; + case VK_FORMAT_A2R10G10B10_UINT_PACK32: + return "VK_FORMAT_A2R10G10B10_UINT_PACK32"; + case VK_FORMAT_A2R10G10B10_SINT_PACK32: + return "VK_FORMAT_A2R10G10B10_SINT_PACK32"; + case VK_FORMAT_A2B10G10R10_UNORM_PACK32: + return "VK_FORMAT_A2B10G10R10_UNORM_PACK32"; + case VK_FORMAT_A2B10G10R10_SNORM_PACK32: + return "VK_FORMAT_A2B10G10R10_SNORM_PACK32"; + case VK_FORMAT_A2B10G10R10_USCALED_PACK32: + return "VK_FORMAT_A2B10G10R10_USCALED_PACK32"; + case VK_FORMAT_A2B10G10R10_SSCALED_PACK32: + return "VK_FORMAT_A2B10G10R10_SSCALED_PACK32"; + case VK_FORMAT_A2B10G10R10_UINT_PACK32: + return "VK_FORMAT_A2B10G10R10_UINT_PACK32"; + case VK_FORMAT_A2B10G10R10_SINT_PACK32: + return "VK_FORMAT_A2B10G10R10_SINT_PACK32"; + case VK_FORMAT_R16_UNORM: + return "VK_FORMAT_R16_UNORM"; + case VK_FORMAT_R16_SNORM: + return "VK_FORMAT_R16_SNORM"; + case VK_FORMAT_R16_USCALED: + return "VK_FORMAT_R16_USCALED"; + case VK_FORMAT_R16_SSCALED: + return "VK_FORMAT_R16_SSCALED"; + case VK_FORMAT_R16_UINT: + return "VK_FORMAT_R16_UINT"; + case VK_FORMAT_R16_SINT: + return "VK_FORMAT_R16_SINT"; + case VK_FORMAT_R16_SFLOAT: + return "VK_FORMAT_R16_SFLOAT"; + case VK_FORMAT_R16G16_UNORM: + return "VK_FORMAT_R16G16_UNORM"; + case VK_FORMAT_R16G16_SNORM: + return "VK_FORMAT_R16G16_SNORM"; + case VK_FORMAT_R16G16_USCALED: + return "VK_FORMAT_R16G16_USCALED"; + case VK_FORMAT_R16G16_SSCALED: + return "VK_FORMAT_R16G16_SSCALED"; + case VK_FORMAT_R16G16_UINT: + return "VK_FORMAT_R16G16_UINT"; + case VK_FORMAT_R16G16_SINT: + return "VK_FORMAT_R16G16_SINT"; + case VK_FORMAT_R16G16_SFLOAT: + return "VK_FORMAT_R16G16_SFLOAT"; + case VK_FORMAT_R16G16B16_UNORM: + return "VK_FORMAT_R16G16B16_UNORM"; + case VK_FORMAT_R16G16B16_SNORM: + return "VK_FORMAT_R16G16B16_SNORM"; + case VK_FORMAT_R16G16B16_USCALED: + return "VK_FORMAT_R16G16B16_USCALED"; + case VK_FORMAT_R16G16B16_SSCALED: + return "VK_FORMAT_R16G16B16_SSCALED"; + case VK_FORMAT_R16G16B16_UINT: + return "VK_FORMAT_R16G16B16_UINT"; + case VK_FORMAT_R16G16B16_SINT: + return "VK_FORMAT_R16G16B16_SINT"; + case VK_FORMAT_R16G16B16_SFLOAT: + return "VK_FORMAT_R16G16B16_SFLOAT"; + case VK_FORMAT_R16G16B16A16_UNORM: + return "VK_FORMAT_R16G16B16A16_UNORM"; + case VK_FORMAT_R16G16B16A16_SNORM: + return "VK_FORMAT_R16G16B16A16_SNORM"; + case 
VK_FORMAT_R16G16B16A16_USCALED: + return "VK_FORMAT_R16G16B16A16_USCALED"; + case VK_FORMAT_R16G16B16A16_SSCALED: + return "VK_FORMAT_R16G16B16A16_SSCALED"; + case VK_FORMAT_R16G16B16A16_UINT: + return "VK_FORMAT_R16G16B16A16_UINT"; + case VK_FORMAT_R16G16B16A16_SINT: + return "VK_FORMAT_R16G16B16A16_SINT"; + case VK_FORMAT_R16G16B16A16_SFLOAT: + return "VK_FORMAT_R16G16B16A16_SFLOAT"; + case VK_FORMAT_R32_UINT: + return "VK_FORMAT_R32_UINT"; + case VK_FORMAT_R32_SINT: + return "VK_FORMAT_R32_SINT"; + case VK_FORMAT_R32_SFLOAT: + return "VK_FORMAT_R32_SFLOAT"; + case VK_FORMAT_R32G32_UINT: + return "VK_FORMAT_R32G32_UINT"; + case VK_FORMAT_R32G32_SINT: + return "VK_FORMAT_R32G32_SINT"; + case VK_FORMAT_R32G32_SFLOAT: + return "VK_FORMAT_R32G32_SFLOAT"; + case VK_FORMAT_R32G32B32_UINT: + return "VK_FORMAT_R32G32B32_UINT"; + case VK_FORMAT_R32G32B32_SINT: + return "VK_FORMAT_R32G32B32_SINT"; + case VK_FORMAT_R32G32B32_SFLOAT: + return "VK_FORMAT_R32G32B32_SFLOAT"; + case VK_FORMAT_R32G32B32A32_UINT: + return "VK_FORMAT_R32G32B32A32_UINT"; + case VK_FORMAT_R32G32B32A32_SINT: + return "VK_FORMAT_R32G32B32A32_SINT"; + case VK_FORMAT_R32G32B32A32_SFLOAT: + return "VK_FORMAT_R32G32B32A32_SFLOAT"; + case VK_FORMAT_R64_UINT: + return "VK_FORMAT_R64_UINT"; + case VK_FORMAT_R64_SINT: + return "VK_FORMAT_R64_SINT"; + case VK_FORMAT_R64_SFLOAT: + return "VK_FORMAT_R64_SFLOAT"; + case VK_FORMAT_R64G64_UINT: + return "VK_FORMAT_R64G64_UINT"; + case VK_FORMAT_R64G64_SINT: + return "VK_FORMAT_R64G64_SINT"; + case VK_FORMAT_R64G64_SFLOAT: + return "VK_FORMAT_R64G64_SFLOAT"; + case VK_FORMAT_R64G64B64_UINT: + return "VK_FORMAT_R64G64B64_UINT"; + case VK_FORMAT_R64G64B64_SINT: + return "VK_FORMAT_R64G64B64_SINT"; + case VK_FORMAT_R64G64B64_SFLOAT: + return "VK_FORMAT_R64G64B64_SFLOAT"; + case VK_FORMAT_R64G64B64A64_UINT: + return "VK_FORMAT_R64G64B64A64_UINT"; + case VK_FORMAT_R64G64B64A64_SINT: + return "VK_FORMAT_R64G64B64A64_SINT"; + case VK_FORMAT_R64G64B64A64_SFLOAT: + return "VK_FORMAT_R64G64B64A64_SFLOAT"; + case VK_FORMAT_B10G11R11_UFLOAT_PACK32: + return "VK_FORMAT_B10G11R11_UFLOAT_PACK32"; + case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32: + return "VK_FORMAT_E5B9G9R9_UFLOAT_PACK32"; + case VK_FORMAT_D16_UNORM: + return "VK_FORMAT_D16_UNORM"; + case VK_FORMAT_X8_D24_UNORM_PACK32: + return "VK_FORMAT_X8_D24_UNORM_PACK32"; + case VK_FORMAT_D32_SFLOAT: + return "VK_FORMAT_D32_SFLOAT"; + case VK_FORMAT_S8_UINT: + return "VK_FORMAT_S8_UINT"; + case VK_FORMAT_D16_UNORM_S8_UINT: + return "VK_FORMAT_D16_UNORM_S8_UINT"; + case VK_FORMAT_D24_UNORM_S8_UINT: + return "VK_FORMAT_D24_UNORM_S8_UINT"; + case VK_FORMAT_D32_SFLOAT_S8_UINT: + return "VK_FORMAT_D32_SFLOAT_S8_UINT"; + case VK_FORMAT_BC1_RGB_UNORM_BLOCK: + return "VK_FORMAT_BC1_RGB_UNORM_BLOCK"; + case VK_FORMAT_BC1_RGB_SRGB_BLOCK: + return "VK_FORMAT_BC1_RGB_SRGB_BLOCK"; + case VK_FORMAT_BC1_RGBA_UNORM_BLOCK: + return "VK_FORMAT_BC1_RGBA_UNORM_BLOCK"; + case VK_FORMAT_BC1_RGBA_SRGB_BLOCK: + return "VK_FORMAT_BC1_RGBA_SRGB_BLOCK"; + case VK_FORMAT_BC2_UNORM_BLOCK: + return "VK_FORMAT_BC2_UNORM_BLOCK"; + case VK_FORMAT_BC2_SRGB_BLOCK: + return "VK_FORMAT_BC2_SRGB_BLOCK"; + case VK_FORMAT_BC3_UNORM_BLOCK: + return "VK_FORMAT_BC3_UNORM_BLOCK"; + case VK_FORMAT_BC3_SRGB_BLOCK: + return "VK_FORMAT_BC3_SRGB_BLOCK"; + case VK_FORMAT_BC4_UNORM_BLOCK: + return "VK_FORMAT_BC4_UNORM_BLOCK"; + case VK_FORMAT_BC4_SNORM_BLOCK: + return "VK_FORMAT_BC4_SNORM_BLOCK"; + case VK_FORMAT_BC5_UNORM_BLOCK: + return "VK_FORMAT_BC5_UNORM_BLOCK"; + case VK_FORMAT_BC5_SNORM_BLOCK: + return 
"VK_FORMAT_BC5_SNORM_BLOCK"; + case VK_FORMAT_BC6H_UFLOAT_BLOCK: + return "VK_FORMAT_BC6H_UFLOAT_BLOCK"; + case VK_FORMAT_BC6H_SFLOAT_BLOCK: + return "VK_FORMAT_BC6H_SFLOAT_BLOCK"; + case VK_FORMAT_BC7_UNORM_BLOCK: + return "VK_FORMAT_BC7_UNORM_BLOCK"; + case VK_FORMAT_BC7_SRGB_BLOCK: + return "VK_FORMAT_BC7_SRGB_BLOCK"; + case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK: + return "VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK"; + case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK: + return "VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK"; + case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK: + return "VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK"; + case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK: + return "VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK"; + case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK: + return "VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK"; + case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK: + return "VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK"; + case VK_FORMAT_EAC_R11_UNORM_BLOCK: + return "VK_FORMAT_EAC_R11_UNORM_BLOCK"; + case VK_FORMAT_EAC_R11_SNORM_BLOCK: + return "VK_FORMAT_EAC_R11_SNORM_BLOCK"; + case VK_FORMAT_EAC_R11G11_UNORM_BLOCK: + return "VK_FORMAT_EAC_R11G11_UNORM_BLOCK"; + case VK_FORMAT_EAC_R11G11_SNORM_BLOCK: + return "VK_FORMAT_EAC_R11G11_SNORM_BLOCK"; + case VK_FORMAT_ASTC_4x4_UNORM_BLOCK: + return "VK_FORMAT_ASTC_4x4_UNORM_BLOCK"; + case VK_FORMAT_ASTC_4x4_SRGB_BLOCK: + return "VK_FORMAT_ASTC_4x4_SRGB_BLOCK"; + case VK_FORMAT_ASTC_5x4_UNORM_BLOCK: + return "VK_FORMAT_ASTC_5x4_UNORM_BLOCK"; + case VK_FORMAT_ASTC_5x4_SRGB_BLOCK: + return "VK_FORMAT_ASTC_5x4_SRGB_BLOCK"; + case VK_FORMAT_ASTC_5x5_UNORM_BLOCK: + return "VK_FORMAT_ASTC_5x5_UNORM_BLOCK"; + case VK_FORMAT_ASTC_5x5_SRGB_BLOCK: + return "VK_FORMAT_ASTC_5x5_SRGB_BLOCK"; + case VK_FORMAT_ASTC_6x5_UNORM_BLOCK: + return "VK_FORMAT_ASTC_6x5_UNORM_BLOCK"; + case VK_FORMAT_ASTC_6x5_SRGB_BLOCK: + return "VK_FORMAT_ASTC_6x5_SRGB_BLOCK"; + case VK_FORMAT_ASTC_6x6_UNORM_BLOCK: + return "VK_FORMAT_ASTC_6x6_UNORM_BLOCK"; + case VK_FORMAT_ASTC_6x6_SRGB_BLOCK: + return "VK_FORMAT_ASTC_6x6_SRGB_BLOCK"; + case VK_FORMAT_ASTC_8x5_UNORM_BLOCK: + return "VK_FORMAT_ASTC_8x5_UNORM_BLOCK"; + case VK_FORMAT_ASTC_8x5_SRGB_BLOCK: + return "VK_FORMAT_ASTC_8x5_SRGB_BLOCK"; + case VK_FORMAT_ASTC_8x6_UNORM_BLOCK: + return "VK_FORMAT_ASTC_8x6_UNORM_BLOCK"; + case VK_FORMAT_ASTC_8x6_SRGB_BLOCK: + return "VK_FORMAT_ASTC_8x6_SRGB_BLOCK"; + case VK_FORMAT_ASTC_8x8_UNORM_BLOCK: + return "VK_FORMAT_ASTC_8x8_UNORM_BLOCK"; + case VK_FORMAT_ASTC_8x8_SRGB_BLOCK: + return "VK_FORMAT_ASTC_8x8_SRGB_BLOCK"; + case VK_FORMAT_ASTC_10x5_UNORM_BLOCK: + return "VK_FORMAT_ASTC_10x5_UNORM_BLOCK"; + case VK_FORMAT_ASTC_10x5_SRGB_BLOCK: + return "VK_FORMAT_ASTC_10x5_SRGB_BLOCK"; + case VK_FORMAT_ASTC_10x6_UNORM_BLOCK: + return "VK_FORMAT_ASTC_10x6_UNORM_BLOCK"; + case VK_FORMAT_ASTC_10x6_SRGB_BLOCK: + return "VK_FORMAT_ASTC_10x6_SRGB_BLOCK"; + case VK_FORMAT_ASTC_10x8_UNORM_BLOCK: + return "VK_FORMAT_ASTC_10x8_UNORM_BLOCK"; + case VK_FORMAT_ASTC_10x8_SRGB_BLOCK: + return "VK_FORMAT_ASTC_10x8_SRGB_BLOCK"; + case VK_FORMAT_ASTC_10x10_UNORM_BLOCK: + return "VK_FORMAT_ASTC_10x10_UNORM_BLOCK"; + case VK_FORMAT_ASTC_10x10_SRGB_BLOCK: + return "VK_FORMAT_ASTC_10x10_SRGB_BLOCK"; + case VK_FORMAT_ASTC_12x10_UNORM_BLOCK: + return "VK_FORMAT_ASTC_12x10_UNORM_BLOCK"; + case VK_FORMAT_ASTC_12x10_SRGB_BLOCK: + return "VK_FORMAT_ASTC_12x10_SRGB_BLOCK"; + case VK_FORMAT_ASTC_12x12_UNORM_BLOCK: + return "VK_FORMAT_ASTC_12x12_UNORM_BLOCK"; + case VK_FORMAT_ASTC_12x12_SRGB_BLOCK: + return "VK_FORMAT_ASTC_12x12_SRGB_BLOCK"; + case VK_FORMAT_G8B8G8R8_422_UNORM: + return 
"VK_FORMAT_G8B8G8R8_422_UNORM"; + case VK_FORMAT_B8G8R8G8_422_UNORM: + return "VK_FORMAT_B8G8R8G8_422_UNORM"; + case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM: + return "VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM"; + case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM: + return "VK_FORMAT_G8_B8R8_2PLANE_420_UNORM"; + case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM: + return "VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM"; + case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM: + return "VK_FORMAT_G8_B8R8_2PLANE_422_UNORM"; + case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM: + return "VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM"; + case VK_FORMAT_R10X6_UNORM_PACK16: + return "VK_FORMAT_R10X6_UNORM_PACK16"; + case VK_FORMAT_R10X6G10X6_UNORM_2PACK16: + return "VK_FORMAT_R10X6G10X6_UNORM_2PACK16"; + case VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16: + return "VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16"; + case VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16: + return "VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16"; + case VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16: + return "VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16"; + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16: + return "VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16"; + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16: + return "VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16"; + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16: + return "VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16"; + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16: + return "VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16"; + case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16: + return "VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16"; + case VK_FORMAT_R12X4_UNORM_PACK16: + return "VK_FORMAT_R12X4_UNORM_PACK16"; + case VK_FORMAT_R12X4G12X4_UNORM_2PACK16: + return "VK_FORMAT_R12X4G12X4_UNORM_2PACK16"; + case VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16: + return "VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16"; + case VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16: + return "VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16"; + case VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16: + return "VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16"; + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16: + return "VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16"; + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16: + return "VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16"; + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16: + return "VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16"; + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16: + return "VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16"; + case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16: + return "VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16"; + case VK_FORMAT_G16B16G16R16_422_UNORM: + return "VK_FORMAT_G16B16G16R16_422_UNORM"; + case VK_FORMAT_B16G16R16G16_422_UNORM: + return "VK_FORMAT_B16G16R16G16_422_UNORM"; + case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM: + return "VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM"; + case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM: + return "VK_FORMAT_G16_B16R16_2PLANE_420_UNORM"; + case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM: + return "VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM"; + case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM: + return "VK_FORMAT_G16_B16R16_2PLANE_422_UNORM"; + case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM: + return "VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM"; + case VK_FORMAT_G8_B8R8_2PLANE_444_UNORM: + 
return "VK_FORMAT_G8_B8R8_2PLANE_444_UNORM"; + case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16: + return "VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16"; + case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16: + return "VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16"; + case VK_FORMAT_G16_B16R16_2PLANE_444_UNORM: + return "VK_FORMAT_G16_B16R16_2PLANE_444_UNORM"; + case VK_FORMAT_A4R4G4B4_UNORM_PACK16: + return "VK_FORMAT_A4R4G4B4_UNORM_PACK16"; + case VK_FORMAT_A4B4G4R4_UNORM_PACK16: + return "VK_FORMAT_A4B4G4R4_UNORM_PACK16"; + case VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK"; + case VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK: + return "VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK"; + case VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG: + return "VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG"; + case VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG: + return "VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG"; + case VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG: + return "VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG"; + case VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG: + return "VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG"; + case VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG: + return "VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG"; + case VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG: + return "VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG"; + case VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG: + return "VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG"; + case VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG: + return "VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG"; + case VK_FORMAT_R16G16_SFIXED5_NV: + return "VK_FORMAT_R16G16_SFIXED5_NV"; + case VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR: + return "VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR"; + case VK_FORMAT_A8_UNORM_KHR: + return "VK_FORMAT_A8_UNORM_KHR"; + default: + return "Unhandled VkFormat"; + } +} +static inline const char* string_VkImageTiling(VkImageTiling input_value) { + switch (input_value) { + case VK_IMAGE_TILING_OPTIMAL: + return "VK_IMAGE_TILING_OPTIMAL"; + case VK_IMAGE_TILING_LINEAR: + return "VK_IMAGE_TILING_LINEAR"; + case VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT: + return "VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT"; + default: + return "Unhandled VkImageTiling"; + } +} +static inline const char* string_VkImageType(VkImageType input_value) { + switch (input_value) { + case VK_IMAGE_TYPE_1D: + return "VK_IMAGE_TYPE_1D"; + case VK_IMAGE_TYPE_2D: + return "VK_IMAGE_TYPE_2D"; + case VK_IMAGE_TYPE_3D: + return "VK_IMAGE_TYPE_3D"; + default: + return "Unhandled VkImageType"; + } +} +static inline const char* 
string_VkPhysicalDeviceType(VkPhysicalDeviceType input_value) { + switch (input_value) { + case VK_PHYSICAL_DEVICE_TYPE_OTHER: + return "VK_PHYSICAL_DEVICE_TYPE_OTHER"; + case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU: + return "VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU"; + case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU: + return "VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU"; + case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU: + return "VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU"; + case VK_PHYSICAL_DEVICE_TYPE_CPU: + return "VK_PHYSICAL_DEVICE_TYPE_CPU"; + default: + return "Unhandled VkPhysicalDeviceType"; + } +} +static inline const char* string_VkQueryType(VkQueryType input_value) { + switch (input_value) { + case VK_QUERY_TYPE_OCCLUSION: + return "VK_QUERY_TYPE_OCCLUSION"; + case VK_QUERY_TYPE_PIPELINE_STATISTICS: + return "VK_QUERY_TYPE_PIPELINE_STATISTICS"; + case VK_QUERY_TYPE_TIMESTAMP: + return "VK_QUERY_TYPE_TIMESTAMP"; + case VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR: + return "VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR"; + case VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT: + return "VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT"; + case VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR: + return "VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR"; + case VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR: + return "VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR"; + case VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR: + return "VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR"; + case VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV: + return "VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV"; + case VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL: + return "VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL"; + case VK_QUERY_TYPE_VIDEO_ENCODE_FEEDBACK_KHR: + return "VK_QUERY_TYPE_VIDEO_ENCODE_FEEDBACK_KHR"; + case VK_QUERY_TYPE_MESH_PRIMITIVES_GENERATED_EXT: + return "VK_QUERY_TYPE_MESH_PRIMITIVES_GENERATED_EXT"; + case VK_QUERY_TYPE_PRIMITIVES_GENERATED_EXT: + return "VK_QUERY_TYPE_PRIMITIVES_GENERATED_EXT"; + case VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR: + return "VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR"; + case VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR: + return "VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR"; + case VK_QUERY_TYPE_MICROMAP_SERIALIZATION_SIZE_EXT: + return "VK_QUERY_TYPE_MICROMAP_SERIALIZATION_SIZE_EXT"; + case VK_QUERY_TYPE_MICROMAP_COMPACTED_SIZE_EXT: + return "VK_QUERY_TYPE_MICROMAP_COMPACTED_SIZE_EXT"; + default: + return "Unhandled VkQueryType"; + } +} +static inline const char* string_VkSharingMode(VkSharingMode input_value) { + switch (input_value) { + case VK_SHARING_MODE_EXCLUSIVE: + return "VK_SHARING_MODE_EXCLUSIVE"; + case VK_SHARING_MODE_CONCURRENT: + return "VK_SHARING_MODE_CONCURRENT"; + default: + return "Unhandled VkSharingMode"; + } +} +static inline const char* string_VkComponentSwizzle(VkComponentSwizzle input_value) { + switch (input_value) { + case VK_COMPONENT_SWIZZLE_IDENTITY: + return "VK_COMPONENT_SWIZZLE_IDENTITY"; + case VK_COMPONENT_SWIZZLE_ZERO: + return "VK_COMPONENT_SWIZZLE_ZERO"; + case VK_COMPONENT_SWIZZLE_ONE: + return "VK_COMPONENT_SWIZZLE_ONE"; + case VK_COMPONENT_SWIZZLE_R: + return "VK_COMPONENT_SWIZZLE_R"; + case VK_COMPONENT_SWIZZLE_G: + return "VK_COMPONENT_SWIZZLE_G"; + case VK_COMPONENT_SWIZZLE_B: + return "VK_COMPONENT_SWIZZLE_B"; + case VK_COMPONENT_SWIZZLE_A: + return "VK_COMPONENT_SWIZZLE_A"; + default: + return "Unhandled VkComponentSwizzle"; + } +} +static inline const char* 
string_VkImageViewType(VkImageViewType input_value) { + switch (input_value) { + case VK_IMAGE_VIEW_TYPE_1D: + return "VK_IMAGE_VIEW_TYPE_1D"; + case VK_IMAGE_VIEW_TYPE_2D: + return "VK_IMAGE_VIEW_TYPE_2D"; + case VK_IMAGE_VIEW_TYPE_3D: + return "VK_IMAGE_VIEW_TYPE_3D"; + case VK_IMAGE_VIEW_TYPE_CUBE: + return "VK_IMAGE_VIEW_TYPE_CUBE"; + case VK_IMAGE_VIEW_TYPE_1D_ARRAY: + return "VK_IMAGE_VIEW_TYPE_1D_ARRAY"; + case VK_IMAGE_VIEW_TYPE_2D_ARRAY: + return "VK_IMAGE_VIEW_TYPE_2D_ARRAY"; + case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY: + return "VK_IMAGE_VIEW_TYPE_CUBE_ARRAY"; + default: + return "Unhandled VkImageViewType"; + } +} +static inline const char* string_VkBlendFactor(VkBlendFactor input_value) { + switch (input_value) { + case VK_BLEND_FACTOR_ZERO: + return "VK_BLEND_FACTOR_ZERO"; + case VK_BLEND_FACTOR_ONE: + return "VK_BLEND_FACTOR_ONE"; + case VK_BLEND_FACTOR_SRC_COLOR: + return "VK_BLEND_FACTOR_SRC_COLOR"; + case VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR: + return "VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR"; + case VK_BLEND_FACTOR_DST_COLOR: + return "VK_BLEND_FACTOR_DST_COLOR"; + case VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR: + return "VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR"; + case VK_BLEND_FACTOR_SRC_ALPHA: + return "VK_BLEND_FACTOR_SRC_ALPHA"; + case VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA: + return "VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA"; + case VK_BLEND_FACTOR_DST_ALPHA: + return "VK_BLEND_FACTOR_DST_ALPHA"; + case VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA: + return "VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA"; + case VK_BLEND_FACTOR_CONSTANT_COLOR: + return "VK_BLEND_FACTOR_CONSTANT_COLOR"; + case VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR: + return "VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR"; + case VK_BLEND_FACTOR_CONSTANT_ALPHA: + return "VK_BLEND_FACTOR_CONSTANT_ALPHA"; + case VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA: + return "VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA"; + case VK_BLEND_FACTOR_SRC_ALPHA_SATURATE: + return "VK_BLEND_FACTOR_SRC_ALPHA_SATURATE"; + case VK_BLEND_FACTOR_SRC1_COLOR: + return "VK_BLEND_FACTOR_SRC1_COLOR"; + case VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR: + return "VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR"; + case VK_BLEND_FACTOR_SRC1_ALPHA: + return "VK_BLEND_FACTOR_SRC1_ALPHA"; + case VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA: + return "VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA"; + default: + return "Unhandled VkBlendFactor"; + } +} +static inline const char* string_VkBlendOp(VkBlendOp input_value) { + switch (input_value) { + case VK_BLEND_OP_ADD: + return "VK_BLEND_OP_ADD"; + case VK_BLEND_OP_SUBTRACT: + return "VK_BLEND_OP_SUBTRACT"; + case VK_BLEND_OP_REVERSE_SUBTRACT: + return "VK_BLEND_OP_REVERSE_SUBTRACT"; + case VK_BLEND_OP_MIN: + return "VK_BLEND_OP_MIN"; + case VK_BLEND_OP_MAX: + return "VK_BLEND_OP_MAX"; + case VK_BLEND_OP_ZERO_EXT: + return "VK_BLEND_OP_ZERO_EXT"; + case VK_BLEND_OP_SRC_EXT: + return "VK_BLEND_OP_SRC_EXT"; + case VK_BLEND_OP_DST_EXT: + return "VK_BLEND_OP_DST_EXT"; + case VK_BLEND_OP_SRC_OVER_EXT: + return "VK_BLEND_OP_SRC_OVER_EXT"; + case VK_BLEND_OP_DST_OVER_EXT: + return "VK_BLEND_OP_DST_OVER_EXT"; + case VK_BLEND_OP_SRC_IN_EXT: + return "VK_BLEND_OP_SRC_IN_EXT"; + case VK_BLEND_OP_DST_IN_EXT: + return "VK_BLEND_OP_DST_IN_EXT"; + case VK_BLEND_OP_SRC_OUT_EXT: + return "VK_BLEND_OP_SRC_OUT_EXT"; + case VK_BLEND_OP_DST_OUT_EXT: + return "VK_BLEND_OP_DST_OUT_EXT"; + case VK_BLEND_OP_SRC_ATOP_EXT: + return "VK_BLEND_OP_SRC_ATOP_EXT"; + case VK_BLEND_OP_DST_ATOP_EXT: + return "VK_BLEND_OP_DST_ATOP_EXT"; + case VK_BLEND_OP_XOR_EXT: + return "VK_BLEND_OP_XOR_EXT"; + case 
VK_BLEND_OP_MULTIPLY_EXT: + return "VK_BLEND_OP_MULTIPLY_EXT"; + case VK_BLEND_OP_SCREEN_EXT: + return "VK_BLEND_OP_SCREEN_EXT"; + case VK_BLEND_OP_OVERLAY_EXT: + return "VK_BLEND_OP_OVERLAY_EXT"; + case VK_BLEND_OP_DARKEN_EXT: + return "VK_BLEND_OP_DARKEN_EXT"; + case VK_BLEND_OP_LIGHTEN_EXT: + return "VK_BLEND_OP_LIGHTEN_EXT"; + case VK_BLEND_OP_COLORDODGE_EXT: + return "VK_BLEND_OP_COLORDODGE_EXT"; + case VK_BLEND_OP_COLORBURN_EXT: + return "VK_BLEND_OP_COLORBURN_EXT"; + case VK_BLEND_OP_HARDLIGHT_EXT: + return "VK_BLEND_OP_HARDLIGHT_EXT"; + case VK_BLEND_OP_SOFTLIGHT_EXT: + return "VK_BLEND_OP_SOFTLIGHT_EXT"; + case VK_BLEND_OP_DIFFERENCE_EXT: + return "VK_BLEND_OP_DIFFERENCE_EXT"; + case VK_BLEND_OP_EXCLUSION_EXT: + return "VK_BLEND_OP_EXCLUSION_EXT"; + case VK_BLEND_OP_INVERT_EXT: + return "VK_BLEND_OP_INVERT_EXT"; + case VK_BLEND_OP_INVERT_RGB_EXT: + return "VK_BLEND_OP_INVERT_RGB_EXT"; + case VK_BLEND_OP_LINEARDODGE_EXT: + return "VK_BLEND_OP_LINEARDODGE_EXT"; + case VK_BLEND_OP_LINEARBURN_EXT: + return "VK_BLEND_OP_LINEARBURN_EXT"; + case VK_BLEND_OP_VIVIDLIGHT_EXT: + return "VK_BLEND_OP_VIVIDLIGHT_EXT"; + case VK_BLEND_OP_LINEARLIGHT_EXT: + return "VK_BLEND_OP_LINEARLIGHT_EXT"; + case VK_BLEND_OP_PINLIGHT_EXT: + return "VK_BLEND_OP_PINLIGHT_EXT"; + case VK_BLEND_OP_HARDMIX_EXT: + return "VK_BLEND_OP_HARDMIX_EXT"; + case VK_BLEND_OP_HSL_HUE_EXT: + return "VK_BLEND_OP_HSL_HUE_EXT"; + case VK_BLEND_OP_HSL_SATURATION_EXT: + return "VK_BLEND_OP_HSL_SATURATION_EXT"; + case VK_BLEND_OP_HSL_COLOR_EXT: + return "VK_BLEND_OP_HSL_COLOR_EXT"; + case VK_BLEND_OP_HSL_LUMINOSITY_EXT: + return "VK_BLEND_OP_HSL_LUMINOSITY_EXT"; + case VK_BLEND_OP_PLUS_EXT: + return "VK_BLEND_OP_PLUS_EXT"; + case VK_BLEND_OP_PLUS_CLAMPED_EXT: + return "VK_BLEND_OP_PLUS_CLAMPED_EXT"; + case VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT: + return "VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT"; + case VK_BLEND_OP_PLUS_DARKER_EXT: + return "VK_BLEND_OP_PLUS_DARKER_EXT"; + case VK_BLEND_OP_MINUS_EXT: + return "VK_BLEND_OP_MINUS_EXT"; + case VK_BLEND_OP_MINUS_CLAMPED_EXT: + return "VK_BLEND_OP_MINUS_CLAMPED_EXT"; + case VK_BLEND_OP_CONTRAST_EXT: + return "VK_BLEND_OP_CONTRAST_EXT"; + case VK_BLEND_OP_INVERT_OVG_EXT: + return "VK_BLEND_OP_INVERT_OVG_EXT"; + case VK_BLEND_OP_RED_EXT: + return "VK_BLEND_OP_RED_EXT"; + case VK_BLEND_OP_GREEN_EXT: + return "VK_BLEND_OP_GREEN_EXT"; + case VK_BLEND_OP_BLUE_EXT: + return "VK_BLEND_OP_BLUE_EXT"; + default: + return "Unhandled VkBlendOp"; + } +} +static inline const char* string_VkCompareOp(VkCompareOp input_value) { + switch (input_value) { + case VK_COMPARE_OP_NEVER: + return "VK_COMPARE_OP_NEVER"; + case VK_COMPARE_OP_LESS: + return "VK_COMPARE_OP_LESS"; + case VK_COMPARE_OP_EQUAL: + return "VK_COMPARE_OP_EQUAL"; + case VK_COMPARE_OP_LESS_OR_EQUAL: + return "VK_COMPARE_OP_LESS_OR_EQUAL"; + case VK_COMPARE_OP_GREATER: + return "VK_COMPARE_OP_GREATER"; + case VK_COMPARE_OP_NOT_EQUAL: + return "VK_COMPARE_OP_NOT_EQUAL"; + case VK_COMPARE_OP_GREATER_OR_EQUAL: + return "VK_COMPARE_OP_GREATER_OR_EQUAL"; + case VK_COMPARE_OP_ALWAYS: + return "VK_COMPARE_OP_ALWAYS"; + default: + return "Unhandled VkCompareOp"; + } +} +static inline const char* string_VkDynamicState(VkDynamicState input_value) { + switch (input_value) { + case VK_DYNAMIC_STATE_VIEWPORT: + return "VK_DYNAMIC_STATE_VIEWPORT"; + case VK_DYNAMIC_STATE_SCISSOR: + return "VK_DYNAMIC_STATE_SCISSOR"; + case VK_DYNAMIC_STATE_LINE_WIDTH: + return "VK_DYNAMIC_STATE_LINE_WIDTH"; + case VK_DYNAMIC_STATE_DEPTH_BIAS: + return 
"VK_DYNAMIC_STATE_DEPTH_BIAS"; + case VK_DYNAMIC_STATE_BLEND_CONSTANTS: + return "VK_DYNAMIC_STATE_BLEND_CONSTANTS"; + case VK_DYNAMIC_STATE_DEPTH_BOUNDS: + return "VK_DYNAMIC_STATE_DEPTH_BOUNDS"; + case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK: + return "VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK"; + case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK: + return "VK_DYNAMIC_STATE_STENCIL_WRITE_MASK"; + case VK_DYNAMIC_STATE_STENCIL_REFERENCE: + return "VK_DYNAMIC_STATE_STENCIL_REFERENCE"; + case VK_DYNAMIC_STATE_CULL_MODE: + return "VK_DYNAMIC_STATE_CULL_MODE"; + case VK_DYNAMIC_STATE_FRONT_FACE: + return "VK_DYNAMIC_STATE_FRONT_FACE"; + case VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY: + return "VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY"; + case VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT: + return "VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT"; + case VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT: + return "VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT"; + case VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE: + return "VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE"; + case VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE: + return "VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE"; + case VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE: + return "VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE"; + case VK_DYNAMIC_STATE_DEPTH_COMPARE_OP: + return "VK_DYNAMIC_STATE_DEPTH_COMPARE_OP"; + case VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE: + return "VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE"; + case VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE: + return "VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE"; + case VK_DYNAMIC_STATE_STENCIL_OP: + return "VK_DYNAMIC_STATE_STENCIL_OP"; + case VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE: + return "VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE"; + case VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE: + return "VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE"; + case VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE: + return "VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE"; + case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV: + return "VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV"; + case VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT: + return "VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT"; + case VK_DYNAMIC_STATE_DISCARD_RECTANGLE_ENABLE_EXT: + return "VK_DYNAMIC_STATE_DISCARD_RECTANGLE_ENABLE_EXT"; + case VK_DYNAMIC_STATE_DISCARD_RECTANGLE_MODE_EXT: + return "VK_DYNAMIC_STATE_DISCARD_RECTANGLE_MODE_EXT"; + case VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT: + return "VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT"; + case VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR: + return "VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR"; + case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV: + return "VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV"; + case VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV: + return "VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV"; + case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_ENABLE_NV: + return "VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_ENABLE_NV"; + case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV: + return "VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV"; + case VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR: + return "VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR"; + case VK_DYNAMIC_STATE_VERTEX_INPUT_EXT: + return "VK_DYNAMIC_STATE_VERTEX_INPUT_EXT"; + case VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT: + return "VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT"; + case VK_DYNAMIC_STATE_LOGIC_OP_EXT: + return "VK_DYNAMIC_STATE_LOGIC_OP_EXT"; + case VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT: + return "VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT"; + case VK_DYNAMIC_STATE_DEPTH_CLAMP_ENABLE_EXT: + return "VK_DYNAMIC_STATE_DEPTH_CLAMP_ENABLE_EXT"; + case 
VK_DYNAMIC_STATE_POLYGON_MODE_EXT: + return "VK_DYNAMIC_STATE_POLYGON_MODE_EXT"; + case VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT: + return "VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT"; + case VK_DYNAMIC_STATE_SAMPLE_MASK_EXT: + return "VK_DYNAMIC_STATE_SAMPLE_MASK_EXT"; + case VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT: + return "VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT"; + case VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT: + return "VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT"; + case VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT: + return "VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT"; + case VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT: + return "VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT"; + case VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT: + return "VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT"; + case VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT: + return "VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT"; + case VK_DYNAMIC_STATE_TESSELLATION_DOMAIN_ORIGIN_EXT: + return "VK_DYNAMIC_STATE_TESSELLATION_DOMAIN_ORIGIN_EXT"; + case VK_DYNAMIC_STATE_RASTERIZATION_STREAM_EXT: + return "VK_DYNAMIC_STATE_RASTERIZATION_STREAM_EXT"; + case VK_DYNAMIC_STATE_CONSERVATIVE_RASTERIZATION_MODE_EXT: + return "VK_DYNAMIC_STATE_CONSERVATIVE_RASTERIZATION_MODE_EXT"; + case VK_DYNAMIC_STATE_EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT: + return "VK_DYNAMIC_STATE_EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT"; + case VK_DYNAMIC_STATE_DEPTH_CLIP_ENABLE_EXT: + return "VK_DYNAMIC_STATE_DEPTH_CLIP_ENABLE_EXT"; + case VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_ENABLE_EXT: + return "VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_ENABLE_EXT"; + case VK_DYNAMIC_STATE_COLOR_BLEND_ADVANCED_EXT: + return "VK_DYNAMIC_STATE_COLOR_BLEND_ADVANCED_EXT"; + case VK_DYNAMIC_STATE_PROVOKING_VERTEX_MODE_EXT: + return "VK_DYNAMIC_STATE_PROVOKING_VERTEX_MODE_EXT"; + case VK_DYNAMIC_STATE_LINE_RASTERIZATION_MODE_EXT: + return "VK_DYNAMIC_STATE_LINE_RASTERIZATION_MODE_EXT"; + case VK_DYNAMIC_STATE_LINE_STIPPLE_ENABLE_EXT: + return "VK_DYNAMIC_STATE_LINE_STIPPLE_ENABLE_EXT"; + case VK_DYNAMIC_STATE_DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT: + return "VK_DYNAMIC_STATE_DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT"; + case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_ENABLE_NV: + return "VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_ENABLE_NV"; + case VK_DYNAMIC_STATE_VIEWPORT_SWIZZLE_NV: + return "VK_DYNAMIC_STATE_VIEWPORT_SWIZZLE_NV"; + case VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_ENABLE_NV: + return "VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_ENABLE_NV"; + case VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_LOCATION_NV: + return "VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_LOCATION_NV"; + case VK_DYNAMIC_STATE_COVERAGE_MODULATION_MODE_NV: + return "VK_DYNAMIC_STATE_COVERAGE_MODULATION_MODE_NV"; + case VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_ENABLE_NV: + return "VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_ENABLE_NV"; + case VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_NV: + return "VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_NV"; + case VK_DYNAMIC_STATE_SHADING_RATE_IMAGE_ENABLE_NV: + return "VK_DYNAMIC_STATE_SHADING_RATE_IMAGE_ENABLE_NV"; + case VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV: + return "VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV"; + case VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV: + return "VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV"; + case VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT: + return "VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT"; + case VK_DYNAMIC_STATE_LINE_STIPPLE_KHR: + return "VK_DYNAMIC_STATE_LINE_STIPPLE_KHR"; + case VK_DYNAMIC_STATE_DEPTH_CLAMP_RANGE_EXT: + return 
"VK_DYNAMIC_STATE_DEPTH_CLAMP_RANGE_EXT"; + default: + return "Unhandled VkDynamicState"; + } +} +static inline const char* string_VkFrontFace(VkFrontFace input_value) { + switch (input_value) { + case VK_FRONT_FACE_COUNTER_CLOCKWISE: + return "VK_FRONT_FACE_COUNTER_CLOCKWISE"; + case VK_FRONT_FACE_CLOCKWISE: + return "VK_FRONT_FACE_CLOCKWISE"; + default: + return "Unhandled VkFrontFace"; + } +} +static inline const char* string_VkVertexInputRate(VkVertexInputRate input_value) { + switch (input_value) { + case VK_VERTEX_INPUT_RATE_VERTEX: + return "VK_VERTEX_INPUT_RATE_VERTEX"; + case VK_VERTEX_INPUT_RATE_INSTANCE: + return "VK_VERTEX_INPUT_RATE_INSTANCE"; + default: + return "Unhandled VkVertexInputRate"; + } +} +static inline const char* string_VkPrimitiveTopology(VkPrimitiveTopology input_value) { + switch (input_value) { + case VK_PRIMITIVE_TOPOLOGY_POINT_LIST: + return "VK_PRIMITIVE_TOPOLOGY_POINT_LIST"; + case VK_PRIMITIVE_TOPOLOGY_LINE_LIST: + return "VK_PRIMITIVE_TOPOLOGY_LINE_LIST"; + case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP: + return "VK_PRIMITIVE_TOPOLOGY_LINE_STRIP"; + case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST: + return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST"; + case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP: + return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP"; + case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN: + return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN"; + case VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY: + return "VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY"; + case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY: + return "VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY"; + case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY: + return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY"; + case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY: + return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY"; + case VK_PRIMITIVE_TOPOLOGY_PATCH_LIST: + return "VK_PRIMITIVE_TOPOLOGY_PATCH_LIST"; + default: + return "Unhandled VkPrimitiveTopology"; + } +} +static inline const char* string_VkPolygonMode(VkPolygonMode input_value) { + switch (input_value) { + case VK_POLYGON_MODE_FILL: + return "VK_POLYGON_MODE_FILL"; + case VK_POLYGON_MODE_LINE: + return "VK_POLYGON_MODE_LINE"; + case VK_POLYGON_MODE_POINT: + return "VK_POLYGON_MODE_POINT"; + case VK_POLYGON_MODE_FILL_RECTANGLE_NV: + return "VK_POLYGON_MODE_FILL_RECTANGLE_NV"; + default: + return "Unhandled VkPolygonMode"; + } +} +static inline const char* string_VkStencilOp(VkStencilOp input_value) { + switch (input_value) { + case VK_STENCIL_OP_KEEP: + return "VK_STENCIL_OP_KEEP"; + case VK_STENCIL_OP_ZERO: + return "VK_STENCIL_OP_ZERO"; + case VK_STENCIL_OP_REPLACE: + return "VK_STENCIL_OP_REPLACE"; + case VK_STENCIL_OP_INCREMENT_AND_CLAMP: + return "VK_STENCIL_OP_INCREMENT_AND_CLAMP"; + case VK_STENCIL_OP_DECREMENT_AND_CLAMP: + return "VK_STENCIL_OP_DECREMENT_AND_CLAMP"; + case VK_STENCIL_OP_INVERT: + return "VK_STENCIL_OP_INVERT"; + case VK_STENCIL_OP_INCREMENT_AND_WRAP: + return "VK_STENCIL_OP_INCREMENT_AND_WRAP"; + case VK_STENCIL_OP_DECREMENT_AND_WRAP: + return "VK_STENCIL_OP_DECREMENT_AND_WRAP"; + default: + return "Unhandled VkStencilOp"; + } +} +static inline const char* string_VkLogicOp(VkLogicOp input_value) { + switch (input_value) { + case VK_LOGIC_OP_CLEAR: + return "VK_LOGIC_OP_CLEAR"; + case VK_LOGIC_OP_AND: + return "VK_LOGIC_OP_AND"; + case VK_LOGIC_OP_AND_REVERSE: + return "VK_LOGIC_OP_AND_REVERSE"; + case VK_LOGIC_OP_COPY: + return "VK_LOGIC_OP_COPY"; + case VK_LOGIC_OP_AND_INVERTED: + return 
"VK_LOGIC_OP_AND_INVERTED"; + case VK_LOGIC_OP_NO_OP: + return "VK_LOGIC_OP_NO_OP"; + case VK_LOGIC_OP_XOR: + return "VK_LOGIC_OP_XOR"; + case VK_LOGIC_OP_OR: + return "VK_LOGIC_OP_OR"; + case VK_LOGIC_OP_NOR: + return "VK_LOGIC_OP_NOR"; + case VK_LOGIC_OP_EQUIVALENT: + return "VK_LOGIC_OP_EQUIVALENT"; + case VK_LOGIC_OP_INVERT: + return "VK_LOGIC_OP_INVERT"; + case VK_LOGIC_OP_OR_REVERSE: + return "VK_LOGIC_OP_OR_REVERSE"; + case VK_LOGIC_OP_COPY_INVERTED: + return "VK_LOGIC_OP_COPY_INVERTED"; + case VK_LOGIC_OP_OR_INVERTED: + return "VK_LOGIC_OP_OR_INVERTED"; + case VK_LOGIC_OP_NAND: + return "VK_LOGIC_OP_NAND"; + case VK_LOGIC_OP_SET: + return "VK_LOGIC_OP_SET"; + default: + return "Unhandled VkLogicOp"; + } +} +static inline const char* string_VkBorderColor(VkBorderColor input_value) { + switch (input_value) { + case VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK: + return "VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK"; + case VK_BORDER_COLOR_INT_TRANSPARENT_BLACK: + return "VK_BORDER_COLOR_INT_TRANSPARENT_BLACK"; + case VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK: + return "VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK"; + case VK_BORDER_COLOR_INT_OPAQUE_BLACK: + return "VK_BORDER_COLOR_INT_OPAQUE_BLACK"; + case VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE: + return "VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE"; + case VK_BORDER_COLOR_INT_OPAQUE_WHITE: + return "VK_BORDER_COLOR_INT_OPAQUE_WHITE"; + case VK_BORDER_COLOR_FLOAT_CUSTOM_EXT: + return "VK_BORDER_COLOR_FLOAT_CUSTOM_EXT"; + case VK_BORDER_COLOR_INT_CUSTOM_EXT: + return "VK_BORDER_COLOR_INT_CUSTOM_EXT"; + default: + return "Unhandled VkBorderColor"; + } +} +static inline const char* string_VkFilter(VkFilter input_value) { + switch (input_value) { + case VK_FILTER_NEAREST: + return "VK_FILTER_NEAREST"; + case VK_FILTER_LINEAR: + return "VK_FILTER_LINEAR"; + case VK_FILTER_CUBIC_EXT: + return "VK_FILTER_CUBIC_EXT"; + default: + return "Unhandled VkFilter"; + } +} +static inline const char* string_VkSamplerAddressMode(VkSamplerAddressMode input_value) { + switch (input_value) { + case VK_SAMPLER_ADDRESS_MODE_REPEAT: + return "VK_SAMPLER_ADDRESS_MODE_REPEAT"; + case VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT: + return "VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT"; + case VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE: + return "VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE"; + case VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER: + return "VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER"; + case VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE: + return "VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE"; + default: + return "Unhandled VkSamplerAddressMode"; + } +} +static inline const char* string_VkSamplerMipmapMode(VkSamplerMipmapMode input_value) { + switch (input_value) { + case VK_SAMPLER_MIPMAP_MODE_NEAREST: + return "VK_SAMPLER_MIPMAP_MODE_NEAREST"; + case VK_SAMPLER_MIPMAP_MODE_LINEAR: + return "VK_SAMPLER_MIPMAP_MODE_LINEAR"; + default: + return "Unhandled VkSamplerMipmapMode"; + } +} +static inline const char* string_VkDescriptorType(VkDescriptorType input_value) { + switch (input_value) { + case VK_DESCRIPTOR_TYPE_SAMPLER: + return "VK_DESCRIPTOR_TYPE_SAMPLER"; + case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: + return "VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER"; + case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: + return "VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE"; + case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: + return "VK_DESCRIPTOR_TYPE_STORAGE_IMAGE"; + case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: + return "VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER"; + case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: + return 
"VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER"; + case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: + return "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER"; + case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: + return "VK_DESCRIPTOR_TYPE_STORAGE_BUFFER"; + case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: + return "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC"; + case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: + return "VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC"; + case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: + return "VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT"; + case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK: + return "VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK"; + case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: + return "VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR"; + case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV: + return "VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV"; + case VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM: + return "VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM"; + case VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM: + return "VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM"; + case VK_DESCRIPTOR_TYPE_MUTABLE_EXT: + return "VK_DESCRIPTOR_TYPE_MUTABLE_EXT"; + default: + return "Unhandled VkDescriptorType"; + } +} +static inline const char* string_VkAttachmentLoadOp(VkAttachmentLoadOp input_value) { + switch (input_value) { + case VK_ATTACHMENT_LOAD_OP_LOAD: + return "VK_ATTACHMENT_LOAD_OP_LOAD"; + case VK_ATTACHMENT_LOAD_OP_CLEAR: + return "VK_ATTACHMENT_LOAD_OP_CLEAR"; + case VK_ATTACHMENT_LOAD_OP_DONT_CARE: + return "VK_ATTACHMENT_LOAD_OP_DONT_CARE"; + case VK_ATTACHMENT_LOAD_OP_NONE_KHR: + return "VK_ATTACHMENT_LOAD_OP_NONE_KHR"; + default: + return "Unhandled VkAttachmentLoadOp"; + } +} +static inline const char* string_VkAttachmentStoreOp(VkAttachmentStoreOp input_value) { + switch (input_value) { + case VK_ATTACHMENT_STORE_OP_STORE: + return "VK_ATTACHMENT_STORE_OP_STORE"; + case VK_ATTACHMENT_STORE_OP_DONT_CARE: + return "VK_ATTACHMENT_STORE_OP_DONT_CARE"; + case VK_ATTACHMENT_STORE_OP_NONE: + return "VK_ATTACHMENT_STORE_OP_NONE"; + default: + return "Unhandled VkAttachmentStoreOp"; + } +} +static inline const char* string_VkPipelineBindPoint(VkPipelineBindPoint input_value) { + switch (input_value) { + case VK_PIPELINE_BIND_POINT_GRAPHICS: + return "VK_PIPELINE_BIND_POINT_GRAPHICS"; + case VK_PIPELINE_BIND_POINT_COMPUTE: + return "VK_PIPELINE_BIND_POINT_COMPUTE"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + case VK_PIPELINE_BIND_POINT_EXECUTION_GRAPH_AMDX: + return "VK_PIPELINE_BIND_POINT_EXECUTION_GRAPH_AMDX"; +#endif // VK_ENABLE_BETA_EXTENSIONS + case VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR: + return "VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR"; + case VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI: + return "VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI"; + default: + return "Unhandled VkPipelineBindPoint"; + } +} +static inline const char* string_VkCommandBufferLevel(VkCommandBufferLevel input_value) { + switch (input_value) { + case VK_COMMAND_BUFFER_LEVEL_PRIMARY: + return "VK_COMMAND_BUFFER_LEVEL_PRIMARY"; + case VK_COMMAND_BUFFER_LEVEL_SECONDARY: + return "VK_COMMAND_BUFFER_LEVEL_SECONDARY"; + default: + return "Unhandled VkCommandBufferLevel"; + } +} +static inline const char* string_VkIndexType(VkIndexType input_value) { + switch (input_value) { + case VK_INDEX_TYPE_UINT16: + return "VK_INDEX_TYPE_UINT16"; + case VK_INDEX_TYPE_UINT32: + return "VK_INDEX_TYPE_UINT32"; + case VK_INDEX_TYPE_NONE_KHR: + return "VK_INDEX_TYPE_NONE_KHR"; + case VK_INDEX_TYPE_UINT8_KHR: + return "VK_INDEX_TYPE_UINT8_KHR"; + default: + return 
"Unhandled VkIndexType"; + } +} +static inline const char* string_VkSubpassContents(VkSubpassContents input_value) { + switch (input_value) { + case VK_SUBPASS_CONTENTS_INLINE: + return "VK_SUBPASS_CONTENTS_INLINE"; + case VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS: + return "VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS"; + case VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_KHR: + return "VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_KHR"; + default: + return "Unhandled VkSubpassContents"; + } +} +static inline const char* string_VkPointClippingBehavior(VkPointClippingBehavior input_value) { + switch (input_value) { + case VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES: + return "VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES"; + case VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY: + return "VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY"; + default: + return "Unhandled VkPointClippingBehavior"; + } +} +static inline const char* string_VkTessellationDomainOrigin(VkTessellationDomainOrigin input_value) { + switch (input_value) { + case VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT: + return "VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT"; + case VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT: + return "VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT"; + default: + return "Unhandled VkTessellationDomainOrigin"; + } +} +static inline const char* string_VkSamplerYcbcrModelConversion(VkSamplerYcbcrModelConversion input_value) { + switch (input_value) { + case VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY: + return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY"; + case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY: + return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY"; + case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709: + return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709"; + case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601: + return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601"; + case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020: + return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020"; + default: + return "Unhandled VkSamplerYcbcrModelConversion"; + } +} +static inline const char* string_VkSamplerYcbcrRange(VkSamplerYcbcrRange input_value) { + switch (input_value) { + case VK_SAMPLER_YCBCR_RANGE_ITU_FULL: + return "VK_SAMPLER_YCBCR_RANGE_ITU_FULL"; + case VK_SAMPLER_YCBCR_RANGE_ITU_NARROW: + return "VK_SAMPLER_YCBCR_RANGE_ITU_NARROW"; + default: + return "Unhandled VkSamplerYcbcrRange"; + } +} +static inline const char* string_VkChromaLocation(VkChromaLocation input_value) { + switch (input_value) { + case VK_CHROMA_LOCATION_COSITED_EVEN: + return "VK_CHROMA_LOCATION_COSITED_EVEN"; + case VK_CHROMA_LOCATION_MIDPOINT: + return "VK_CHROMA_LOCATION_MIDPOINT"; + default: + return "Unhandled VkChromaLocation"; + } +} +static inline const char* string_VkDescriptorUpdateTemplateType(VkDescriptorUpdateTemplateType input_value) { + switch (input_value) { + case VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET: + return "VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET"; + case VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR: + return "VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR"; + default: + return "Unhandled VkDescriptorUpdateTemplateType"; + } +} +static inline const char* string_VkDriverId(VkDriverId input_value) { + switch (input_value) { + case VK_DRIVER_ID_AMD_PROPRIETARY: + return "VK_DRIVER_ID_AMD_PROPRIETARY"; + case VK_DRIVER_ID_AMD_OPEN_SOURCE: + return "VK_DRIVER_ID_AMD_OPEN_SOURCE"; + case VK_DRIVER_ID_MESA_RADV: + return "VK_DRIVER_ID_MESA_RADV"; + case 
VK_DRIVER_ID_NVIDIA_PROPRIETARY: + return "VK_DRIVER_ID_NVIDIA_PROPRIETARY"; + case VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS: + return "VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS"; + case VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA: + return "VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA"; + case VK_DRIVER_ID_IMAGINATION_PROPRIETARY: + return "VK_DRIVER_ID_IMAGINATION_PROPRIETARY"; + case VK_DRIVER_ID_QUALCOMM_PROPRIETARY: + return "VK_DRIVER_ID_QUALCOMM_PROPRIETARY"; + case VK_DRIVER_ID_ARM_PROPRIETARY: + return "VK_DRIVER_ID_ARM_PROPRIETARY"; + case VK_DRIVER_ID_GOOGLE_SWIFTSHADER: + return "VK_DRIVER_ID_GOOGLE_SWIFTSHADER"; + case VK_DRIVER_ID_GGP_PROPRIETARY: + return "VK_DRIVER_ID_GGP_PROPRIETARY"; + case VK_DRIVER_ID_BROADCOM_PROPRIETARY: + return "VK_DRIVER_ID_BROADCOM_PROPRIETARY"; + case VK_DRIVER_ID_MESA_LLVMPIPE: + return "VK_DRIVER_ID_MESA_LLVMPIPE"; + case VK_DRIVER_ID_MOLTENVK: + return "VK_DRIVER_ID_MOLTENVK"; + case VK_DRIVER_ID_COREAVI_PROPRIETARY: + return "VK_DRIVER_ID_COREAVI_PROPRIETARY"; + case VK_DRIVER_ID_JUICE_PROPRIETARY: + return "VK_DRIVER_ID_JUICE_PROPRIETARY"; + case VK_DRIVER_ID_VERISILICON_PROPRIETARY: + return "VK_DRIVER_ID_VERISILICON_PROPRIETARY"; + case VK_DRIVER_ID_MESA_TURNIP: + return "VK_DRIVER_ID_MESA_TURNIP"; + case VK_DRIVER_ID_MESA_V3DV: + return "VK_DRIVER_ID_MESA_V3DV"; + case VK_DRIVER_ID_MESA_PANVK: + return "VK_DRIVER_ID_MESA_PANVK"; + case VK_DRIVER_ID_SAMSUNG_PROPRIETARY: + return "VK_DRIVER_ID_SAMSUNG_PROPRIETARY"; + case VK_DRIVER_ID_MESA_VENUS: + return "VK_DRIVER_ID_MESA_VENUS"; + case VK_DRIVER_ID_MESA_DOZEN: + return "VK_DRIVER_ID_MESA_DOZEN"; + case VK_DRIVER_ID_MESA_NVK: + return "VK_DRIVER_ID_MESA_NVK"; + case VK_DRIVER_ID_IMAGINATION_OPEN_SOURCE_MESA: + return "VK_DRIVER_ID_IMAGINATION_OPEN_SOURCE_MESA"; + case VK_DRIVER_ID_MESA_HONEYKRISP: + return "VK_DRIVER_ID_MESA_HONEYKRISP"; + case VK_DRIVER_ID_RESERVED_27: + return "VK_DRIVER_ID_RESERVED_27"; + default: + return "Unhandled VkDriverId"; + } +} +static inline const char* string_VkShaderFloatControlsIndependence(VkShaderFloatControlsIndependence input_value) { + switch (input_value) { + case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY: + return "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY"; + case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL: + return "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL"; + case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE: + return "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE"; + default: + return "Unhandled VkShaderFloatControlsIndependence"; + } +} +static inline const char* string_VkSamplerReductionMode(VkSamplerReductionMode input_value) { + switch (input_value) { + case VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE: + return "VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE"; + case VK_SAMPLER_REDUCTION_MODE_MIN: + return "VK_SAMPLER_REDUCTION_MODE_MIN"; + case VK_SAMPLER_REDUCTION_MODE_MAX: + return "VK_SAMPLER_REDUCTION_MODE_MAX"; + case VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_RANGECLAMP_QCOM: + return "VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_RANGECLAMP_QCOM"; + default: + return "Unhandled VkSamplerReductionMode"; + } +} +static inline const char* string_VkSemaphoreType(VkSemaphoreType input_value) { + switch (input_value) { + case VK_SEMAPHORE_TYPE_BINARY: + return "VK_SEMAPHORE_TYPE_BINARY"; + case VK_SEMAPHORE_TYPE_TIMELINE: + return "VK_SEMAPHORE_TYPE_TIMELINE"; + default: + return "Unhandled VkSemaphoreType"; + } +} +static inline const char* string_VkPresentModeKHR(VkPresentModeKHR input_value) { + switch (input_value) { + case VK_PRESENT_MODE_IMMEDIATE_KHR: 
+ return "VK_PRESENT_MODE_IMMEDIATE_KHR"; + case VK_PRESENT_MODE_MAILBOX_KHR: + return "VK_PRESENT_MODE_MAILBOX_KHR"; + case VK_PRESENT_MODE_FIFO_KHR: + return "VK_PRESENT_MODE_FIFO_KHR"; + case VK_PRESENT_MODE_FIFO_RELAXED_KHR: + return "VK_PRESENT_MODE_FIFO_RELAXED_KHR"; + case VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR: + return "VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR"; + case VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR: + return "VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR"; + //case VK_PRESENT_MODE_FIFO_LATEST_READY_EXT: + //return "VK_PRESENT_MODE_FIFO_LATEST_READY_EXT"; + default: + return "Unhandled VkPresentModeKHR"; + } +} +static inline const char* string_VkColorSpaceKHR(VkColorSpaceKHR input_value) { + switch (input_value) { + case VK_COLOR_SPACE_SRGB_NONLINEAR_KHR: + return "VK_COLOR_SPACE_SRGB_NONLINEAR_KHR"; + case VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT: + return "VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT"; + case VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT: + return "VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT"; + case VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT: + return "VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT"; + case VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT: + return "VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT"; + case VK_COLOR_SPACE_BT709_LINEAR_EXT: + return "VK_COLOR_SPACE_BT709_LINEAR_EXT"; + case VK_COLOR_SPACE_BT709_NONLINEAR_EXT: + return "VK_COLOR_SPACE_BT709_NONLINEAR_EXT"; + case VK_COLOR_SPACE_BT2020_LINEAR_EXT: + return "VK_COLOR_SPACE_BT2020_LINEAR_EXT"; + case VK_COLOR_SPACE_HDR10_ST2084_EXT: + return "VK_COLOR_SPACE_HDR10_ST2084_EXT"; + case VK_COLOR_SPACE_DOLBYVISION_EXT: + return "VK_COLOR_SPACE_DOLBYVISION_EXT"; + case VK_COLOR_SPACE_HDR10_HLG_EXT: + return "VK_COLOR_SPACE_HDR10_HLG_EXT"; + case VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT: + return "VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT"; + case VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT: + return "VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT"; + case VK_COLOR_SPACE_PASS_THROUGH_EXT: + return "VK_COLOR_SPACE_PASS_THROUGH_EXT"; + case VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT: + return "VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT"; + case VK_COLOR_SPACE_DISPLAY_NATIVE_AMD: + return "VK_COLOR_SPACE_DISPLAY_NATIVE_AMD"; + default: + return "Unhandled VkColorSpaceKHR"; + } +} +static inline const char* string_VkQueryResultStatusKHR(VkQueryResultStatusKHR input_value) { + switch (input_value) { + case VK_QUERY_RESULT_STATUS_ERROR_KHR: + return "VK_QUERY_RESULT_STATUS_ERROR_KHR"; + case VK_QUERY_RESULT_STATUS_NOT_READY_KHR: + return "VK_QUERY_RESULT_STATUS_NOT_READY_KHR"; + case VK_QUERY_RESULT_STATUS_COMPLETE_KHR: + return "VK_QUERY_RESULT_STATUS_COMPLETE_KHR"; + case VK_QUERY_RESULT_STATUS_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_KHR: + return "VK_QUERY_RESULT_STATUS_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_KHR"; + default: + return "Unhandled VkQueryResultStatusKHR"; + } +} +static inline const char* string_VkPerformanceCounterUnitKHR(VkPerformanceCounterUnitKHR input_value) { + switch (input_value) { + case VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR: + return "VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR"; + case VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR: + return "VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR"; + case VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR: + return "VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR"; + case VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR: + return "VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR"; + case VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR: + return "VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR"; + case 
VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR: + return "VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR"; + case VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR: + return "VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR"; + case VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR: + return "VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR"; + case VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR: + return "VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR"; + case VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR: + return "VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR"; + case VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR: + return "VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR"; + default: + return "Unhandled VkPerformanceCounterUnitKHR"; + } +} +static inline const char* string_VkPerformanceCounterScopeKHR(VkPerformanceCounterScopeKHR input_value) { + switch (input_value) { + case VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR: + return "VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR"; + case VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR: + return "VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR"; + case VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR: + return "VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR"; + default: + return "Unhandled VkPerformanceCounterScopeKHR"; + } +} +static inline const char* string_VkPerformanceCounterStorageKHR(VkPerformanceCounterStorageKHR input_value) { + switch (input_value) { + case VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR: + return "VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR"; + case VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR: + return "VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR"; + case VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR: + return "VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR"; + case VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR: + return "VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR"; + case VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR: + return "VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR"; + case VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR: + return "VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR"; + default: + return "Unhandled VkPerformanceCounterStorageKHR"; + } +} +static inline const char* string_VkQueueGlobalPriorityKHR(VkQueueGlobalPriorityKHR input_value) { + switch (input_value) { + case VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR: + return "VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR"; + case VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR: + return "VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR"; + case VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR: + return "VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR"; + case VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR: + return "VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR"; + default: + return "Unhandled VkQueueGlobalPriorityKHR"; + } +} +static inline const char* string_VkFragmentShadingRateCombinerOpKHR(VkFragmentShadingRateCombinerOpKHR input_value) { + switch (input_value) { + case VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR: + return "VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR"; + case VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR: + return "VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR"; + case VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR: + return "VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR"; + case VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR: + return "VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR"; + case VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR: + return "VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR"; + default: + return "Unhandled VkFragmentShadingRateCombinerOpKHR"; + } +} +static inline const char* string_VkPipelineExecutableStatisticFormatKHR(VkPipelineExecutableStatisticFormatKHR input_value) { + switch (input_value) { + case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR: + return 
"VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR"; + case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR: + return "VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR"; + case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR: + return "VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR"; + case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR: + return "VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR"; + default: + return "Unhandled VkPipelineExecutableStatisticFormatKHR"; + } +} +static inline const char* string_VkVideoEncodeTuningModeKHR(VkVideoEncodeTuningModeKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR: + return "VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR"; + case VK_VIDEO_ENCODE_TUNING_MODE_HIGH_QUALITY_KHR: + return "VK_VIDEO_ENCODE_TUNING_MODE_HIGH_QUALITY_KHR"; + case VK_VIDEO_ENCODE_TUNING_MODE_LOW_LATENCY_KHR: + return "VK_VIDEO_ENCODE_TUNING_MODE_LOW_LATENCY_KHR"; + case VK_VIDEO_ENCODE_TUNING_MODE_ULTRA_LOW_LATENCY_KHR: + return "VK_VIDEO_ENCODE_TUNING_MODE_ULTRA_LOW_LATENCY_KHR"; + case VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR: + return "VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR"; + default: + return "Unhandled VkVideoEncodeTuningModeKHR"; + } +} +static inline const char* string_VkComponentTypeKHR(VkComponentTypeKHR input_value) { + switch (input_value) { + case VK_COMPONENT_TYPE_FLOAT16_KHR: + return "VK_COMPONENT_TYPE_FLOAT16_KHR"; + case VK_COMPONENT_TYPE_FLOAT32_KHR: + return "VK_COMPONENT_TYPE_FLOAT32_KHR"; + case VK_COMPONENT_TYPE_FLOAT64_KHR: + return "VK_COMPONENT_TYPE_FLOAT64_KHR"; + case VK_COMPONENT_TYPE_SINT8_KHR: + return "VK_COMPONENT_TYPE_SINT8_KHR"; + case VK_COMPONENT_TYPE_SINT16_KHR: + return "VK_COMPONENT_TYPE_SINT16_KHR"; + case VK_COMPONENT_TYPE_SINT32_KHR: + return "VK_COMPONENT_TYPE_SINT32_KHR"; + case VK_COMPONENT_TYPE_SINT64_KHR: + return "VK_COMPONENT_TYPE_SINT64_KHR"; + case VK_COMPONENT_TYPE_UINT8_KHR: + return "VK_COMPONENT_TYPE_UINT8_KHR"; + case VK_COMPONENT_TYPE_UINT16_KHR: + return "VK_COMPONENT_TYPE_UINT16_KHR"; + case VK_COMPONENT_TYPE_UINT32_KHR: + return "VK_COMPONENT_TYPE_UINT32_KHR"; + case VK_COMPONENT_TYPE_UINT64_KHR: + return "VK_COMPONENT_TYPE_UINT64_KHR"; + default: + return "Unhandled VkComponentTypeKHR"; + } +} +static inline const char* string_VkScopeKHR(VkScopeKHR input_value) { + switch (input_value) { + case VK_SCOPE_DEVICE_KHR: + return "VK_SCOPE_DEVICE_KHR"; + case VK_SCOPE_WORKGROUP_KHR: + return "VK_SCOPE_WORKGROUP_KHR"; + case VK_SCOPE_SUBGROUP_KHR: + return "VK_SCOPE_SUBGROUP_KHR"; + case VK_SCOPE_QUEUE_FAMILY_KHR: + return "VK_SCOPE_QUEUE_FAMILY_KHR"; + default: + return "Unhandled VkScopeKHR"; + } +} +static inline const char* string_VkLineRasterizationModeKHR(VkLineRasterizationModeKHR input_value) { + switch (input_value) { + case VK_LINE_RASTERIZATION_MODE_DEFAULT_KHR: + return "VK_LINE_RASTERIZATION_MODE_DEFAULT_KHR"; + case VK_LINE_RASTERIZATION_MODE_RECTANGULAR_KHR: + return "VK_LINE_RASTERIZATION_MODE_RECTANGULAR_KHR"; + case VK_LINE_RASTERIZATION_MODE_BRESENHAM_KHR: + return "VK_LINE_RASTERIZATION_MODE_BRESENHAM_KHR"; + case VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_KHR: + return "VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_KHR"; + default: + return "Unhandled VkLineRasterizationModeKHR"; + } +} +static inline const char* string_VkTimeDomainKHR(VkTimeDomainKHR input_value) { + switch (input_value) { + case VK_TIME_DOMAIN_DEVICE_KHR: + return "VK_TIME_DOMAIN_DEVICE_KHR"; + case VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR: + return 
"VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR"; + case VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_KHR: + return "VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_KHR"; + case VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_KHR: + return "VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_KHR"; + default: + return "Unhandled VkTimeDomainKHR"; + } +} +static inline const char* string_VkPhysicalDeviceLayeredApiKHR(VkPhysicalDeviceLayeredApiKHR input_value) { + switch (input_value) { + case VK_PHYSICAL_DEVICE_LAYERED_API_VULKAN_KHR: + return "VK_PHYSICAL_DEVICE_LAYERED_API_VULKAN_KHR"; + case VK_PHYSICAL_DEVICE_LAYERED_API_D3D12_KHR: + return "VK_PHYSICAL_DEVICE_LAYERED_API_D3D12_KHR"; + case VK_PHYSICAL_DEVICE_LAYERED_API_METAL_KHR: + return "VK_PHYSICAL_DEVICE_LAYERED_API_METAL_KHR"; + case VK_PHYSICAL_DEVICE_LAYERED_API_OPENGL_KHR: + return "VK_PHYSICAL_DEVICE_LAYERED_API_OPENGL_KHR"; + case VK_PHYSICAL_DEVICE_LAYERED_API_OPENGLES_KHR: + return "VK_PHYSICAL_DEVICE_LAYERED_API_OPENGLES_KHR"; + default: + return "Unhandled VkPhysicalDeviceLayeredApiKHR"; + } +} +static inline const char* string_VkDebugReportObjectTypeEXT(VkDebugReportObjectTypeEXT input_value) { + switch (input_value) { + case VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT: + return 
"VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_MODULE_NV_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_MODULE_NV_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_FUNCTION_NV_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_FUNCTION_NV_EXT"; + case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT: + return "VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT"; + default: + return "Unhandled VkDebugReportObjectTypeEXT"; + } +} +static inline const char* string_VkRasterizationOrderAMD(VkRasterizationOrderAMD input_value) { + switch (input_value) { + case VK_RASTERIZATION_ORDER_STRICT_AMD: + return "VK_RASTERIZATION_ORDER_STRICT_AMD"; + case VK_RASTERIZATION_ORDER_RELAXED_AMD: + return "VK_RASTERIZATION_ORDER_RELAXED_AMD"; + default: + return "Unhandled VkRasterizationOrderAMD"; + } +} +static inline const char* string_VkShaderInfoTypeAMD(VkShaderInfoTypeAMD input_value) { + switch (input_value) { + case VK_SHADER_INFO_TYPE_STATISTICS_AMD: + return "VK_SHADER_INFO_TYPE_STATISTICS_AMD"; + case VK_SHADER_INFO_TYPE_BINARY_AMD: + return "VK_SHADER_INFO_TYPE_BINARY_AMD"; + case VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD: + return "VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD"; + default: + return "Unhandled VkShaderInfoTypeAMD"; + } +} +static inline const char* string_VkValidationCheckEXT(VkValidationCheckEXT input_value) { + switch (input_value) { + case VK_VALIDATION_CHECK_ALL_EXT: + return "VK_VALIDATION_CHECK_ALL_EXT"; + case VK_VALIDATION_CHECK_SHADERS_EXT: + return "VK_VALIDATION_CHECK_SHADERS_EXT"; + default: + return "Unhandled VkValidationCheckEXT"; + } +} +static inline const char* string_VkPipelineRobustnessBufferBehaviorEXT(VkPipelineRobustnessBufferBehaviorEXT 
input_value) { + switch (input_value) { + case VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT_EXT: + return "VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT_EXT"; + case VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED_EXT: + return "VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED_EXT"; + case VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT: + return "VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT"; + case VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT: + return "VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT"; + default: + return "Unhandled VkPipelineRobustnessBufferBehaviorEXT"; + } +} +static inline const char* string_VkPipelineRobustnessImageBehaviorEXT(VkPipelineRobustnessImageBehaviorEXT input_value) { + switch (input_value) { + case VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT_EXT: + return "VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT_EXT"; + case VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED_EXT: + return "VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED_EXT"; + case VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_EXT: + return "VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_EXT"; + case VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2_EXT: + return "VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2_EXT"; + default: + return "Unhandled VkPipelineRobustnessImageBehaviorEXT"; + } +} +static inline const char* string_VkDisplayPowerStateEXT(VkDisplayPowerStateEXT input_value) { + switch (input_value) { + case VK_DISPLAY_POWER_STATE_OFF_EXT: + return "VK_DISPLAY_POWER_STATE_OFF_EXT"; + case VK_DISPLAY_POWER_STATE_SUSPEND_EXT: + return "VK_DISPLAY_POWER_STATE_SUSPEND_EXT"; + case VK_DISPLAY_POWER_STATE_ON_EXT: + return "VK_DISPLAY_POWER_STATE_ON_EXT"; + default: + return "Unhandled VkDisplayPowerStateEXT"; + } +} +static inline const char* string_VkDeviceEventTypeEXT(VkDeviceEventTypeEXT input_value) { + switch (input_value) { + case VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT: + return "VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT"; + default: + return "Unhandled VkDeviceEventTypeEXT"; + } +} +static inline const char* string_VkDisplayEventTypeEXT(VkDisplayEventTypeEXT input_value) { + switch (input_value) { + case VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT: + return "VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT"; + default: + return "Unhandled VkDisplayEventTypeEXT"; + } +} +static inline const char* string_VkViewportCoordinateSwizzleNV(VkViewportCoordinateSwizzleNV input_value) { + switch (input_value) { + case VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV: + return "VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV"; + case VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV: + return "VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV"; + case VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV: + return "VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV"; + case VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV: + return "VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV"; + case VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV: + return "VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV"; + case VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV: + return "VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV"; + case VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV: + return "VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV"; + case VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV: + return "VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV"; + default: + return "Unhandled VkViewportCoordinateSwizzleNV"; + } +} +static inline const 
char* string_VkDiscardRectangleModeEXT(VkDiscardRectangleModeEXT input_value) { + switch (input_value) { + case VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT: + return "VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT"; + case VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT: + return "VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT"; + default: + return "Unhandled VkDiscardRectangleModeEXT"; + } +} +static inline const char* string_VkConservativeRasterizationModeEXT(VkConservativeRasterizationModeEXT input_value) { + switch (input_value) { + case VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT: + return "VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT"; + case VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT: + return "VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT"; + case VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT: + return "VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT"; + default: + return "Unhandled VkConservativeRasterizationModeEXT"; + } +} +static inline const char* string_VkBlendOverlapEXT(VkBlendOverlapEXT input_value) { + switch (input_value) { + case VK_BLEND_OVERLAP_UNCORRELATED_EXT: + return "VK_BLEND_OVERLAP_UNCORRELATED_EXT"; + case VK_BLEND_OVERLAP_DISJOINT_EXT: + return "VK_BLEND_OVERLAP_DISJOINT_EXT"; + case VK_BLEND_OVERLAP_CONJOINT_EXT: + return "VK_BLEND_OVERLAP_CONJOINT_EXT"; + default: + return "Unhandled VkBlendOverlapEXT"; + } +} +static inline const char* string_VkCoverageModulationModeNV(VkCoverageModulationModeNV input_value) { + switch (input_value) { + case VK_COVERAGE_MODULATION_MODE_NONE_NV: + return "VK_COVERAGE_MODULATION_MODE_NONE_NV"; + case VK_COVERAGE_MODULATION_MODE_RGB_NV: + return "VK_COVERAGE_MODULATION_MODE_RGB_NV"; + case VK_COVERAGE_MODULATION_MODE_ALPHA_NV: + return "VK_COVERAGE_MODULATION_MODE_ALPHA_NV"; + case VK_COVERAGE_MODULATION_MODE_RGBA_NV: + return "VK_COVERAGE_MODULATION_MODE_RGBA_NV"; + default: + return "Unhandled VkCoverageModulationModeNV"; + } +} +static inline const char* string_VkValidationCacheHeaderVersionEXT(VkValidationCacheHeaderVersionEXT input_value) { + switch (input_value) { + case VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT: + return "VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT"; + default: + return "Unhandled VkValidationCacheHeaderVersionEXT"; + } +} +static inline const char* string_VkShadingRatePaletteEntryNV(VkShadingRatePaletteEntryNV input_value) { + switch (input_value) { + case VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV"; + case VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV"; + case VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV"; + case VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV"; + case VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV"; + case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV"; + case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV"; + case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV"; + case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV: + return 
"VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV"; + case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV"; + case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV"; + case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV: + return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV"; + default: + return "Unhandled VkShadingRatePaletteEntryNV"; + } +} +static inline const char* string_VkCoarseSampleOrderTypeNV(VkCoarseSampleOrderTypeNV input_value) { + switch (input_value) { + case VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV: + return "VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV"; + case VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV: + return "VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV"; + case VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV: + return "VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV"; + case VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV: + return "VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV"; + default: + return "Unhandled VkCoarseSampleOrderTypeNV"; + } +} +static inline const char* string_VkRayTracingShaderGroupTypeKHR(VkRayTracingShaderGroupTypeKHR input_value) { + switch (input_value) { + case VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR: + return "VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR"; + case VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR: + return "VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR"; + case VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR: + return "VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR"; + default: + return "Unhandled VkRayTracingShaderGroupTypeKHR"; + } +} +static inline const char* string_VkGeometryTypeKHR(VkGeometryTypeKHR input_value) { + switch (input_value) { + case VK_GEOMETRY_TYPE_TRIANGLES_KHR: + return "VK_GEOMETRY_TYPE_TRIANGLES_KHR"; + case VK_GEOMETRY_TYPE_AABBS_KHR: + return "VK_GEOMETRY_TYPE_AABBS_KHR"; + case VK_GEOMETRY_TYPE_INSTANCES_KHR: + return "VK_GEOMETRY_TYPE_INSTANCES_KHR"; + default: + return "Unhandled VkGeometryTypeKHR"; + } +} +static inline const char* string_VkAccelerationStructureTypeKHR(VkAccelerationStructureTypeKHR input_value) { + switch (input_value) { + case VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR: + return "VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR"; + case VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR: + return "VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR"; + case VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR: + return "VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR"; + default: + return "Unhandled VkAccelerationStructureTypeKHR"; + } +} +static inline const char* string_VkCopyAccelerationStructureModeKHR(VkCopyAccelerationStructureModeKHR input_value) { + switch (input_value) { + case VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR: + return "VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR"; + case VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR: + return "VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR"; + case VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR: + return "VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR"; + case VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR: + return "VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR"; + default: + return "Unhandled VkCopyAccelerationStructureModeKHR"; + } +} +static inline const char* string_VkAccelerationStructureMemoryRequirementsTypeNV(VkAccelerationStructureMemoryRequirementsTypeNV input_value) { + switch (input_value) { + 
case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV: + return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV"; + case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV: + return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV"; + case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV: + return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV"; + default: + return "Unhandled VkAccelerationStructureMemoryRequirementsTypeNV"; + } +} +static inline const char* string_VkMemoryOverallocationBehaviorAMD(VkMemoryOverallocationBehaviorAMD input_value) { + switch (input_value) { + case VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD: + return "VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD"; + case VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD: + return "VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD"; + case VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD: + return "VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD"; + default: + return "Unhandled VkMemoryOverallocationBehaviorAMD"; + } +} +static inline const char* string_VkPerformanceConfigurationTypeINTEL(VkPerformanceConfigurationTypeINTEL input_value) { + switch (input_value) { + case VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL: + return "VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL"; + default: + return "Unhandled VkPerformanceConfigurationTypeINTEL"; + } +} +static inline const char* string_VkQueryPoolSamplingModeINTEL(VkQueryPoolSamplingModeINTEL input_value) { + switch (input_value) { + case VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL: + return "VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL"; + default: + return "Unhandled VkQueryPoolSamplingModeINTEL"; + } +} +static inline const char* string_VkPerformanceOverrideTypeINTEL(VkPerformanceOverrideTypeINTEL input_value) { + switch (input_value) { + case VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL: + return "VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL"; + case VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL: + return "VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL"; + default: + return "Unhandled VkPerformanceOverrideTypeINTEL"; + } +} +static inline const char* string_VkPerformanceParameterTypeINTEL(VkPerformanceParameterTypeINTEL input_value) { + switch (input_value) { + case VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL: + return "VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL"; + case VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL: + return "VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL"; + default: + return "Unhandled VkPerformanceParameterTypeINTEL"; + } +} +static inline const char* string_VkPerformanceValueTypeINTEL(VkPerformanceValueTypeINTEL input_value) { + switch (input_value) { + case VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL: + return "VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL"; + case VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL: + return "VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL"; + case VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL: + return "VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL"; + case VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL: + return "VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL"; + case VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL: + return "VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL"; + default: + return "Unhandled VkPerformanceValueTypeINTEL"; + } +} +static inline const char* string_VkValidationFeatureEnableEXT(VkValidationFeatureEnableEXT input_value) { + switch 
(input_value) { + case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT: + return "VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT"; + case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT: + return "VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT"; + case VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT: + return "VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT"; + case VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT: + return "VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT"; + case VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT: + return "VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT"; + default: + return "Unhandled VkValidationFeatureEnableEXT"; + } +} +static inline const char* string_VkValidationFeatureDisableEXT(VkValidationFeatureDisableEXT input_value) { + switch (input_value) { + case VK_VALIDATION_FEATURE_DISABLE_ALL_EXT: + return "VK_VALIDATION_FEATURE_DISABLE_ALL_EXT"; + case VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT: + return "VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT"; + case VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT: + return "VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT"; + case VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT: + return "VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT"; + case VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT: + return "VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT"; + case VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT: + return "VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT"; + case VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT: + return "VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT"; + case VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT: + return "VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT"; + default: + return "Unhandled VkValidationFeatureDisableEXT"; + } +} +static inline const char* string_VkCoverageReductionModeNV(VkCoverageReductionModeNV input_value) { + switch (input_value) { + case VK_COVERAGE_REDUCTION_MODE_MERGE_NV: + return "VK_COVERAGE_REDUCTION_MODE_MERGE_NV"; + case VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV: + return "VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV"; + default: + return "Unhandled VkCoverageReductionModeNV"; + } +} +static inline const char* string_VkProvokingVertexModeEXT(VkProvokingVertexModeEXT input_value) { + switch (input_value) { + case VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT: + return "VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT"; + case VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT: + return "VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT"; + default: + return "Unhandled VkProvokingVertexModeEXT"; + } +} +#ifdef VK_USE_PLATFORM_WIN32_KHR +static inline const char* string_VkFullScreenExclusiveEXT(VkFullScreenExclusiveEXT input_value) { + switch (input_value) { + case VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT: + return "VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT"; + case VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT: + return "VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT"; + case VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT: + return "VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT"; + case VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT: + return "VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT"; + default: + return "Unhandled VkFullScreenExclusiveEXT"; + } +} +#endif // VK_USE_PLATFORM_WIN32_KHR +static inline const char* string_VkIndirectCommandsTokenTypeNV(VkIndirectCommandsTokenTypeNV input_value) { + switch (input_value) { + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV"; + case 
VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NV: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NV"; + default: + return "Unhandled VkIndirectCommandsTokenTypeNV"; + } +} +static inline const char* string_VkDepthBiasRepresentationEXT(VkDepthBiasRepresentationEXT input_value) { + switch (input_value) { + case VK_DEPTH_BIAS_REPRESENTATION_LEAST_REPRESENTABLE_VALUE_FORMAT_EXT: + return "VK_DEPTH_BIAS_REPRESENTATION_LEAST_REPRESENTABLE_VALUE_FORMAT_EXT"; + case VK_DEPTH_BIAS_REPRESENTATION_LEAST_REPRESENTABLE_VALUE_FORCE_UNORM_EXT: + return "VK_DEPTH_BIAS_REPRESENTATION_LEAST_REPRESENTABLE_VALUE_FORCE_UNORM_EXT"; + case VK_DEPTH_BIAS_REPRESENTATION_FLOAT_EXT: + return "VK_DEPTH_BIAS_REPRESENTATION_FLOAT_EXT"; + default: + return "Unhandled VkDepthBiasRepresentationEXT"; + } +} +static inline const char* string_VkDeviceMemoryReportEventTypeEXT(VkDeviceMemoryReportEventTypeEXT input_value) { + switch (input_value) { + case VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT: + return "VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT"; + case VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT: + return "VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT"; + case VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT: + return "VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT"; + case VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT: + return "VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT"; + case VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT: + return "VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT"; + default: + return "Unhandled VkDeviceMemoryReportEventTypeEXT"; + } +} +static inline const char* string_VkFragmentShadingRateTypeNV(VkFragmentShadingRateTypeNV input_value) { + switch (input_value) { + case VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV: + return "VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV"; + case VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV: + return "VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV"; + default: + return "Unhandled VkFragmentShadingRateTypeNV"; + } +} +static inline const char* string_VkFragmentShadingRateNV(VkFragmentShadingRateNV input_value) { + switch (input_value) { + case VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV: + return "VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV"; + case VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV: + return "VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV"; + case VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV: + return "VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV"; + case 
VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV: + return "VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV"; + case VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV: + return "VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV"; + case VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV: + return "VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV"; + case VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV: + return "VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV"; + case VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV: + return "VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV"; + case VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV: + return "VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV"; + case VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV: + return "VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV"; + case VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV: + return "VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV"; + case VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV: + return "VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV"; + default: + return "Unhandled VkFragmentShadingRateNV"; + } +} +static inline const char* string_VkAccelerationStructureMotionInstanceTypeNV(VkAccelerationStructureMotionInstanceTypeNV input_value) { + switch (input_value) { + case VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_STATIC_NV: + return "VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_STATIC_NV"; + case VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MATRIX_MOTION_NV: + return "VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MATRIX_MOTION_NV"; + case VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_SRT_MOTION_NV: + return "VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_SRT_MOTION_NV"; + default: + return "Unhandled VkAccelerationStructureMotionInstanceTypeNV"; + } +} +static inline const char* string_VkDeviceFaultAddressTypeEXT(VkDeviceFaultAddressTypeEXT input_value) { + switch (input_value) { + case VK_DEVICE_FAULT_ADDRESS_TYPE_NONE_EXT: + return "VK_DEVICE_FAULT_ADDRESS_TYPE_NONE_EXT"; + case VK_DEVICE_FAULT_ADDRESS_TYPE_READ_INVALID_EXT: + return "VK_DEVICE_FAULT_ADDRESS_TYPE_READ_INVALID_EXT"; + case VK_DEVICE_FAULT_ADDRESS_TYPE_WRITE_INVALID_EXT: + return "VK_DEVICE_FAULT_ADDRESS_TYPE_WRITE_INVALID_EXT"; + case VK_DEVICE_FAULT_ADDRESS_TYPE_EXECUTE_INVALID_EXT: + return "VK_DEVICE_FAULT_ADDRESS_TYPE_EXECUTE_INVALID_EXT"; + case VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_UNKNOWN_EXT: + return "VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_UNKNOWN_EXT"; + case VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_INVALID_EXT: + return "VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_INVALID_EXT"; + case VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_FAULT_EXT: + return "VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_FAULT_EXT"; + default: + return "Unhandled VkDeviceFaultAddressTypeEXT"; + } +} +static inline const char* string_VkDeviceFaultVendorBinaryHeaderVersionEXT(VkDeviceFaultVendorBinaryHeaderVersionEXT input_value) { + switch (input_value) { + case VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT: + return "VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT"; + default: + return "Unhandled VkDeviceFaultVendorBinaryHeaderVersionEXT"; + } +} +static inline const char* string_VkDeviceAddressBindingTypeEXT(VkDeviceAddressBindingTypeEXT input_value) { + switch (input_value) { + case VK_DEVICE_ADDRESS_BINDING_TYPE_BIND_EXT: + return "VK_DEVICE_ADDRESS_BINDING_TYPE_BIND_EXT"; + case 
VK_DEVICE_ADDRESS_BINDING_TYPE_UNBIND_EXT: + return "VK_DEVICE_ADDRESS_BINDING_TYPE_UNBIND_EXT"; + default: + return "Unhandled VkDeviceAddressBindingTypeEXT"; + } +} +static inline const char* string_VkMicromapTypeEXT(VkMicromapTypeEXT input_value) { + switch (input_value) { + case VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT: + return "VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + case VK_MICROMAP_TYPE_DISPLACEMENT_MICROMAP_NV: + return "VK_MICROMAP_TYPE_DISPLACEMENT_MICROMAP_NV"; +#endif // VK_ENABLE_BETA_EXTENSIONS + default: + return "Unhandled VkMicromapTypeEXT"; + } +} +static inline const char* string_VkBuildMicromapModeEXT(VkBuildMicromapModeEXT input_value) { + switch (input_value) { + case VK_BUILD_MICROMAP_MODE_BUILD_EXT: + return "VK_BUILD_MICROMAP_MODE_BUILD_EXT"; + default: + return "Unhandled VkBuildMicromapModeEXT"; + } +} +static inline const char* string_VkCopyMicromapModeEXT(VkCopyMicromapModeEXT input_value) { + switch (input_value) { + case VK_COPY_MICROMAP_MODE_CLONE_EXT: + return "VK_COPY_MICROMAP_MODE_CLONE_EXT"; + case VK_COPY_MICROMAP_MODE_SERIALIZE_EXT: + return "VK_COPY_MICROMAP_MODE_SERIALIZE_EXT"; + case VK_COPY_MICROMAP_MODE_DESERIALIZE_EXT: + return "VK_COPY_MICROMAP_MODE_DESERIALIZE_EXT"; + case VK_COPY_MICROMAP_MODE_COMPACT_EXT: + return "VK_COPY_MICROMAP_MODE_COMPACT_EXT"; + default: + return "Unhandled VkCopyMicromapModeEXT"; + } +} +static inline const char* string_VkOpacityMicromapFormatEXT(VkOpacityMicromapFormatEXT input_value) { + switch (input_value) { + case VK_OPACITY_MICROMAP_FORMAT_2_STATE_EXT: + return "VK_OPACITY_MICROMAP_FORMAT_2_STATE_EXT"; + case VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT: + return "VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT"; + default: + return "Unhandled VkOpacityMicromapFormatEXT"; + } +} +static inline const char* string_VkOpacityMicromapSpecialIndexEXT(VkOpacityMicromapSpecialIndexEXT input_value) { + switch (input_value) { + case VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_TRANSPARENT_EXT: + return "VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_TRANSPARENT_EXT"; + case VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_OPAQUE_EXT: + return "VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_OPAQUE_EXT"; + case VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_TRANSPARENT_EXT: + return "VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_TRANSPARENT_EXT"; + case VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_OPAQUE_EXT: + return "VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_OPAQUE_EXT"; + default: + return "Unhandled VkOpacityMicromapSpecialIndexEXT"; + } +} +static inline const char* string_VkAccelerationStructureCompatibilityKHR(VkAccelerationStructureCompatibilityKHR input_value) { + switch (input_value) { + case VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR: + return "VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR"; + case VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR: + return "VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR"; + default: + return "Unhandled VkAccelerationStructureCompatibilityKHR"; + } +} +static inline const char* string_VkAccelerationStructureBuildTypeKHR(VkAccelerationStructureBuildTypeKHR input_value) { + switch (input_value) { + case VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR: + return "VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR"; + case VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR: + return "VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR"; + case VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR: + return 
"VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR"; + default: + return "Unhandled VkAccelerationStructureBuildTypeKHR"; + } +} +#ifdef VK_ENABLE_BETA_EXTENSIONS +static inline const char* string_VkDisplacementMicromapFormatNV(VkDisplacementMicromapFormatNV input_value) { + switch (input_value) { + case VK_DISPLACEMENT_MICROMAP_FORMAT_64_TRIANGLES_64_BYTES_NV: + return "VK_DISPLACEMENT_MICROMAP_FORMAT_64_TRIANGLES_64_BYTES_NV"; + case VK_DISPLACEMENT_MICROMAP_FORMAT_256_TRIANGLES_128_BYTES_NV: + return "VK_DISPLACEMENT_MICROMAP_FORMAT_256_TRIANGLES_128_BYTES_NV"; + case VK_DISPLACEMENT_MICROMAP_FORMAT_1024_TRIANGLES_128_BYTES_NV: + return "VK_DISPLACEMENT_MICROMAP_FORMAT_1024_TRIANGLES_128_BYTES_NV"; + default: + return "Unhandled VkDisplacementMicromapFormatNV"; + } +} +#endif // VK_ENABLE_BETA_EXTENSIONS +static inline const char* string_VkSubpassMergeStatusEXT(VkSubpassMergeStatusEXT input_value) { + switch (input_value) { + case VK_SUBPASS_MERGE_STATUS_MERGED_EXT: + return "VK_SUBPASS_MERGE_STATUS_MERGED_EXT"; + case VK_SUBPASS_MERGE_STATUS_DISALLOWED_EXT: + return "VK_SUBPASS_MERGE_STATUS_DISALLOWED_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SIDE_EFFECTS_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SIDE_EFFECTS_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SAMPLES_MISMATCH_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SAMPLES_MISMATCH_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_VIEWS_MISMATCH_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_VIEWS_MISMATCH_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_ALIASING_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_ALIASING_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPENDENCIES_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPENDENCIES_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_TOO_MANY_ATTACHMENTS_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_TOO_MANY_ATTACHMENTS_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INSUFFICIENT_STORAGE_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INSUFFICIENT_STORAGE_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPTH_STENCIL_COUNT_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPTH_STENCIL_COUNT_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_RESOLVE_ATTACHMENT_REUSE_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_RESOLVE_ATTACHMENT_REUSE_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SINGLE_SUBPASS_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SINGLE_SUBPASS_EXT"; + case VK_SUBPASS_MERGE_STATUS_NOT_MERGED_UNSPECIFIED_EXT: + return "VK_SUBPASS_MERGE_STATUS_NOT_MERGED_UNSPECIFIED_EXT"; + default: + return "Unhandled VkSubpassMergeStatusEXT"; + } +} +static inline const char* string_VkDirectDriverLoadingModeLUNARG(VkDirectDriverLoadingModeLUNARG input_value) { + switch (input_value) { + case VK_DIRECT_DRIVER_LOADING_MODE_EXCLUSIVE_LUNARG: + return "VK_DIRECT_DRIVER_LOADING_MODE_EXCLUSIVE_LUNARG"; + case VK_DIRECT_DRIVER_LOADING_MODE_INCLUSIVE_LUNARG: + return "VK_DIRECT_DRIVER_LOADING_MODE_INCLUSIVE_LUNARG"; + default: + return "Unhandled VkDirectDriverLoadingModeLUNARG"; + } +} +static inline const char* string_VkOpticalFlowPerformanceLevelNV(VkOpticalFlowPerformanceLevelNV input_value) { + switch (input_value) { + case VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_UNKNOWN_NV: + return "VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_UNKNOWN_NV"; + case VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_SLOW_NV: + return 
"VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_SLOW_NV"; + case VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MEDIUM_NV: + return "VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MEDIUM_NV"; + case VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV: + return "VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV"; + default: + return "Unhandled VkOpticalFlowPerformanceLevelNV"; + } +} +static inline const char* string_VkOpticalFlowSessionBindingPointNV(VkOpticalFlowSessionBindingPointNV input_value) { + switch (input_value) { + case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_UNKNOWN_NV: + return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_UNKNOWN_NV"; + case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_INPUT_NV: + return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_INPUT_NV"; + case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_REFERENCE_NV: + return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_REFERENCE_NV"; + case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_HINT_NV: + return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_HINT_NV"; + case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_FLOW_VECTOR_NV: + return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_FLOW_VECTOR_NV"; + case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_FLOW_VECTOR_NV: + return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_FLOW_VECTOR_NV"; + case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_COST_NV: + return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_COST_NV"; + case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_COST_NV: + return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_COST_NV"; + case VK_OPTICAL_FLOW_SESSION_BINDING_POINT_GLOBAL_FLOW_NV: + return "VK_OPTICAL_FLOW_SESSION_BINDING_POINT_GLOBAL_FLOW_NV"; + default: + return "Unhandled VkOpticalFlowSessionBindingPointNV"; + } +} +static inline const char* string_VkAntiLagModeAMD(VkAntiLagModeAMD input_value) { + switch (input_value) { + case VK_ANTI_LAG_MODE_DRIVER_CONTROL_AMD: + return "VK_ANTI_LAG_MODE_DRIVER_CONTROL_AMD"; + case VK_ANTI_LAG_MODE_ON_AMD: + return "VK_ANTI_LAG_MODE_ON_AMD"; + case VK_ANTI_LAG_MODE_OFF_AMD: + return "VK_ANTI_LAG_MODE_OFF_AMD"; + default: + return "Unhandled VkAntiLagModeAMD"; + } +} +static inline const char* string_VkAntiLagStageAMD(VkAntiLagStageAMD input_value) { + switch (input_value) { + case VK_ANTI_LAG_STAGE_INPUT_AMD: + return "VK_ANTI_LAG_STAGE_INPUT_AMD"; + case VK_ANTI_LAG_STAGE_PRESENT_AMD: + return "VK_ANTI_LAG_STAGE_PRESENT_AMD"; + default: + return "Unhandled VkAntiLagStageAMD"; + } +} +static inline const char* string_VkShaderCodeTypeEXT(VkShaderCodeTypeEXT input_value) { + switch (input_value) { + case VK_SHADER_CODE_TYPE_BINARY_EXT: + return "VK_SHADER_CODE_TYPE_BINARY_EXT"; + case VK_SHADER_CODE_TYPE_SPIRV_EXT: + return "VK_SHADER_CODE_TYPE_SPIRV_EXT"; + default: + return "Unhandled VkShaderCodeTypeEXT"; + } +} +static inline const char* string_VkDepthClampModeEXT(VkDepthClampModeEXT input_value) { + switch (input_value) { + case VK_DEPTH_CLAMP_MODE_VIEWPORT_RANGE_EXT: + return "VK_DEPTH_CLAMP_MODE_VIEWPORT_RANGE_EXT"; + case VK_DEPTH_CLAMP_MODE_USER_DEFINED_RANGE_EXT: + return "VK_DEPTH_CLAMP_MODE_USER_DEFINED_RANGE_EXT"; + default: + return "Unhandled VkDepthClampModeEXT"; + } +} +static inline const char* string_VkRayTracingInvocationReorderModeNV(VkRayTracingInvocationReorderModeNV input_value) { + switch (input_value) { + case VK_RAY_TRACING_INVOCATION_REORDER_MODE_NONE_NV: + return "VK_RAY_TRACING_INVOCATION_REORDER_MODE_NONE_NV"; + case VK_RAY_TRACING_INVOCATION_REORDER_MODE_REORDER_NV: + return "VK_RAY_TRACING_INVOCATION_REORDER_MODE_REORDER_NV"; + default: + return "Unhandled VkRayTracingInvocationReorderModeNV"; + } +} +static 
inline const char* string_VkLayerSettingTypeEXT(VkLayerSettingTypeEXT input_value) { + switch (input_value) { + case VK_LAYER_SETTING_TYPE_BOOL32_EXT: + return "VK_LAYER_SETTING_TYPE_BOOL32_EXT"; + case VK_LAYER_SETTING_TYPE_INT32_EXT: + return "VK_LAYER_SETTING_TYPE_INT32_EXT"; + case VK_LAYER_SETTING_TYPE_INT64_EXT: + return "VK_LAYER_SETTING_TYPE_INT64_EXT"; + case VK_LAYER_SETTING_TYPE_UINT32_EXT: + return "VK_LAYER_SETTING_TYPE_UINT32_EXT"; + case VK_LAYER_SETTING_TYPE_UINT64_EXT: + return "VK_LAYER_SETTING_TYPE_UINT64_EXT"; + case VK_LAYER_SETTING_TYPE_FLOAT32_EXT: + return "VK_LAYER_SETTING_TYPE_FLOAT32_EXT"; + case VK_LAYER_SETTING_TYPE_FLOAT64_EXT: + return "VK_LAYER_SETTING_TYPE_FLOAT64_EXT"; + case VK_LAYER_SETTING_TYPE_STRING_EXT: + return "VK_LAYER_SETTING_TYPE_STRING_EXT"; + default: + return "Unhandled VkLayerSettingTypeEXT"; + } +} +static inline const char* string_VkLatencyMarkerNV(VkLatencyMarkerNV input_value) { + switch (input_value) { + case VK_LATENCY_MARKER_SIMULATION_START_NV: + return "VK_LATENCY_MARKER_SIMULATION_START_NV"; + case VK_LATENCY_MARKER_SIMULATION_END_NV: + return "VK_LATENCY_MARKER_SIMULATION_END_NV"; + case VK_LATENCY_MARKER_RENDERSUBMIT_START_NV: + return "VK_LATENCY_MARKER_RENDERSUBMIT_START_NV"; + case VK_LATENCY_MARKER_RENDERSUBMIT_END_NV: + return "VK_LATENCY_MARKER_RENDERSUBMIT_END_NV"; + case VK_LATENCY_MARKER_PRESENT_START_NV: + return "VK_LATENCY_MARKER_PRESENT_START_NV"; + case VK_LATENCY_MARKER_PRESENT_END_NV: + return "VK_LATENCY_MARKER_PRESENT_END_NV"; + case VK_LATENCY_MARKER_INPUT_SAMPLE_NV: + return "VK_LATENCY_MARKER_INPUT_SAMPLE_NV"; + case VK_LATENCY_MARKER_TRIGGER_FLASH_NV: + return "VK_LATENCY_MARKER_TRIGGER_FLASH_NV"; + case VK_LATENCY_MARKER_OUT_OF_BAND_RENDERSUBMIT_START_NV: + return "VK_LATENCY_MARKER_OUT_OF_BAND_RENDERSUBMIT_START_NV"; + case VK_LATENCY_MARKER_OUT_OF_BAND_RENDERSUBMIT_END_NV: + return "VK_LATENCY_MARKER_OUT_OF_BAND_RENDERSUBMIT_END_NV"; + case VK_LATENCY_MARKER_OUT_OF_BAND_PRESENT_START_NV: + return "VK_LATENCY_MARKER_OUT_OF_BAND_PRESENT_START_NV"; + case VK_LATENCY_MARKER_OUT_OF_BAND_PRESENT_END_NV: + return "VK_LATENCY_MARKER_OUT_OF_BAND_PRESENT_END_NV"; + default: + return "Unhandled VkLatencyMarkerNV"; + } +} +static inline const char* string_VkOutOfBandQueueTypeNV(VkOutOfBandQueueTypeNV input_value) { + switch (input_value) { + case VK_OUT_OF_BAND_QUEUE_TYPE_RENDER_NV: + return "VK_OUT_OF_BAND_QUEUE_TYPE_RENDER_NV"; + case VK_OUT_OF_BAND_QUEUE_TYPE_PRESENT_NV: + return "VK_OUT_OF_BAND_QUEUE_TYPE_PRESENT_NV"; + default: + return "Unhandled VkOutOfBandQueueTypeNV"; + } +} +static inline const char* string_VkBlockMatchWindowCompareModeQCOM(VkBlockMatchWindowCompareModeQCOM input_value) { + switch (input_value) { + case VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MIN_QCOM: + return "VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MIN_QCOM"; + case VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MAX_QCOM: + return "VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MAX_QCOM"; + default: + return "Unhandled VkBlockMatchWindowCompareModeQCOM"; + } +} +static inline const char* string_VkCubicFilterWeightsQCOM(VkCubicFilterWeightsQCOM input_value) { + switch (input_value) { + case VK_CUBIC_FILTER_WEIGHTS_CATMULL_ROM_QCOM: + return "VK_CUBIC_FILTER_WEIGHTS_CATMULL_ROM_QCOM"; + case VK_CUBIC_FILTER_WEIGHTS_ZERO_TANGENT_CARDINAL_QCOM: + return "VK_CUBIC_FILTER_WEIGHTS_ZERO_TANGENT_CARDINAL_QCOM"; + case VK_CUBIC_FILTER_WEIGHTS_B_SPLINE_QCOM: + return "VK_CUBIC_FILTER_WEIGHTS_B_SPLINE_QCOM"; + case VK_CUBIC_FILTER_WEIGHTS_MITCHELL_NETRAVALI_QCOM: + return 
"VK_CUBIC_FILTER_WEIGHTS_MITCHELL_NETRAVALI_QCOM"; + default: + return "Unhandled VkCubicFilterWeightsQCOM"; + } +} +static inline const char* string_VkLayeredDriverUnderlyingApiMSFT(VkLayeredDriverUnderlyingApiMSFT input_value) { + switch (input_value) { + case VK_LAYERED_DRIVER_UNDERLYING_API_NONE_MSFT: + return "VK_LAYERED_DRIVER_UNDERLYING_API_NONE_MSFT"; + case VK_LAYERED_DRIVER_UNDERLYING_API_D3D12_MSFT: + return "VK_LAYERED_DRIVER_UNDERLYING_API_D3D12_MSFT"; + default: + return "Unhandled VkLayeredDriverUnderlyingApiMSFT"; + } +} +static inline const char* string_VkIndirectExecutionSetInfoTypeEXT(VkIndirectExecutionSetInfoTypeEXT input_value) { + switch (input_value) { + case VK_INDIRECT_EXECUTION_SET_INFO_TYPE_PIPELINES_EXT: + return "VK_INDIRECT_EXECUTION_SET_INFO_TYPE_PIPELINES_EXT"; + case VK_INDIRECT_EXECUTION_SET_INFO_TYPE_SHADER_OBJECTS_EXT: + return "VK_INDIRECT_EXECUTION_SET_INFO_TYPE_SHADER_OBJECTS_EXT"; + default: + return "Unhandled VkIndirectExecutionSetInfoTypeEXT"; + } +} +static inline const char* string_VkIndirectCommandsTokenTypeEXT(VkIndirectCommandsTokenTypeEXT input_value) { + switch (input_value) { + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_EXECUTION_SET_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_EXECUTION_SET_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_SEQUENCE_INDEX_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_SEQUENCE_INDEX_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_COUNT_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_COUNT_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_COUNT_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_COUNT_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_NV_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_NV_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_EXT"; + case VK_INDIRECT_COMMANDS_TOKEN_TYPE_TRACE_RAYS2_EXT: + return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_TRACE_RAYS2_EXT"; + default: + return "Unhandled VkIndirectCommandsTokenTypeEXT"; + } +} +static inline const char* string_VkBuildAccelerationStructureModeKHR(VkBuildAccelerationStructureModeKHR input_value) { + switch (input_value) { + case VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR"; + case VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR"; + default: + return "Unhandled VkBuildAccelerationStructureModeKHR"; + } +} +static inline const char* 
string_VkShaderGroupShaderKHR(VkShaderGroupShaderKHR input_value) { + switch (input_value) { + case VK_SHADER_GROUP_SHADER_GENERAL_KHR: + return "VK_SHADER_GROUP_SHADER_GENERAL_KHR"; + case VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR: + return "VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR"; + case VK_SHADER_GROUP_SHADER_ANY_HIT_KHR: + return "VK_SHADER_GROUP_SHADER_ANY_HIT_KHR"; + case VK_SHADER_GROUP_SHADER_INTERSECTION_KHR: + return "VK_SHADER_GROUP_SHADER_INTERSECTION_KHR"; + default: + return "Unhandled VkShaderGroupShaderKHR"; + } +} + +static inline const char* string_VkAccessFlagBits(VkAccessFlagBits input_value) { + switch (input_value) { + case VK_ACCESS_INDIRECT_COMMAND_READ_BIT: + return "VK_ACCESS_INDIRECT_COMMAND_READ_BIT"; + case VK_ACCESS_INDEX_READ_BIT: + return "VK_ACCESS_INDEX_READ_BIT"; + case VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT: + return "VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT"; + case VK_ACCESS_UNIFORM_READ_BIT: + return "VK_ACCESS_UNIFORM_READ_BIT"; + case VK_ACCESS_INPUT_ATTACHMENT_READ_BIT: + return "VK_ACCESS_INPUT_ATTACHMENT_READ_BIT"; + case VK_ACCESS_SHADER_READ_BIT: + return "VK_ACCESS_SHADER_READ_BIT"; + case VK_ACCESS_SHADER_WRITE_BIT: + return "VK_ACCESS_SHADER_WRITE_BIT"; + case VK_ACCESS_COLOR_ATTACHMENT_READ_BIT: + return "VK_ACCESS_COLOR_ATTACHMENT_READ_BIT"; + case VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT: + return "VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT"; + case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT: + return "VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT"; + case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT: + return "VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT"; + case VK_ACCESS_TRANSFER_READ_BIT: + return "VK_ACCESS_TRANSFER_READ_BIT"; + case VK_ACCESS_TRANSFER_WRITE_BIT: + return "VK_ACCESS_TRANSFER_WRITE_BIT"; + case VK_ACCESS_HOST_READ_BIT: + return "VK_ACCESS_HOST_READ_BIT"; + case VK_ACCESS_HOST_WRITE_BIT: + return "VK_ACCESS_HOST_WRITE_BIT"; + case VK_ACCESS_MEMORY_READ_BIT: + return "VK_ACCESS_MEMORY_READ_BIT"; + case VK_ACCESS_MEMORY_WRITE_BIT: + return "VK_ACCESS_MEMORY_WRITE_BIT"; + case VK_ACCESS_NONE: + return "VK_ACCESS_NONE"; + case VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT: + return "VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT"; + case VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT: + return "VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT"; + case VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT: + return "VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT"; + case VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT: + return "VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT"; + case VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT: + return "VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT"; + case VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR: + return "VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR"; + case VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR: + return "VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR"; + case VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT: + return "VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT"; + case VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR: + return "VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR"; + case VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV: + return "VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV"; + case VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV: + return "VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV"; + default: + return "Unhandled VkAccessFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkAccessFlags(VkAccessFlags input_value) { + std::string ret; + int index = 0; + 
while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkAccessFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkAccessFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkImageAspectFlagBits(VkImageAspectFlagBits input_value) { + switch (input_value) { + case VK_IMAGE_ASPECT_COLOR_BIT: + return "VK_IMAGE_ASPECT_COLOR_BIT"; + case VK_IMAGE_ASPECT_DEPTH_BIT: + return "VK_IMAGE_ASPECT_DEPTH_BIT"; + case VK_IMAGE_ASPECT_STENCIL_BIT: + return "VK_IMAGE_ASPECT_STENCIL_BIT"; + case VK_IMAGE_ASPECT_METADATA_BIT: + return "VK_IMAGE_ASPECT_METADATA_BIT"; + case VK_IMAGE_ASPECT_PLANE_0_BIT: + return "VK_IMAGE_ASPECT_PLANE_0_BIT"; + case VK_IMAGE_ASPECT_PLANE_1_BIT: + return "VK_IMAGE_ASPECT_PLANE_1_BIT"; + case VK_IMAGE_ASPECT_PLANE_2_BIT: + return "VK_IMAGE_ASPECT_PLANE_2_BIT"; + case VK_IMAGE_ASPECT_NONE: + return "VK_IMAGE_ASPECT_NONE"; + case VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT: + return "VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT"; + case VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT: + return "VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT"; + case VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT: + return "VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT"; + case VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT: + return "VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT"; + default: + return "Unhandled VkImageAspectFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkImageAspectFlags(VkImageAspectFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkImageAspectFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkImageAspectFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkFormatFeatureFlagBits(VkFormatFeatureFlagBits input_value) { + switch (input_value) { + case VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT: + return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT"; + case VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT: + return "VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT"; + case VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT: + return "VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT"; + case VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT: + return "VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT"; + case VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT: + return "VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT"; + case VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT: + return "VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT"; + case VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT: + return "VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT"; + case VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT: + return "VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT"; + case VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT: + return "VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT"; + case VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT: + return "VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT"; + case VK_FORMAT_FEATURE_BLIT_SRC_BIT: + return "VK_FORMAT_FEATURE_BLIT_SRC_BIT"; + case VK_FORMAT_FEATURE_BLIT_DST_BIT: + return "VK_FORMAT_FEATURE_BLIT_DST_BIT"; + case VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT: + return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT"; + case VK_FORMAT_FEATURE_TRANSFER_SRC_BIT: + return "VK_FORMAT_FEATURE_TRANSFER_SRC_BIT"; + case VK_FORMAT_FEATURE_TRANSFER_DST_BIT: + return "VK_FORMAT_FEATURE_TRANSFER_DST_BIT"; + case VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT: + 
return "VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT"; + case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT: + return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT"; + case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT: + return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT"; + case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT: + return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT"; + case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT: + return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT"; + case VK_FORMAT_FEATURE_DISJOINT_BIT: + return "VK_FORMAT_FEATURE_DISJOINT_BIT"; + case VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT: + return "VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT"; + case VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT: + return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT"; + case VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR: + return "VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR"; + case VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR: + return "VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR"; + case VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR: + return "VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR"; + case VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT: + return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT"; + case VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT: + return "VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT"; + case VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR: + return "VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR"; + case VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR: + return "VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR"; + case VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR: + return "VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR"; + default: + return "Unhandled VkFormatFeatureFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkFormatFeatureFlags(VkFormatFeatureFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkFormatFeatureFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkFormatFeatureFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkImageCreateFlagBits(VkImageCreateFlagBits input_value) { + switch (input_value) { + case VK_IMAGE_CREATE_SPARSE_BINDING_BIT: + return "VK_IMAGE_CREATE_SPARSE_BINDING_BIT"; + case VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT: + return "VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT"; + case VK_IMAGE_CREATE_SPARSE_ALIASED_BIT: + return "VK_IMAGE_CREATE_SPARSE_ALIASED_BIT"; + case VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT: + return "VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT"; + case VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT: + return "VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT"; + case VK_IMAGE_CREATE_ALIAS_BIT: + return "VK_IMAGE_CREATE_ALIAS_BIT"; + case VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT: + return "VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT"; + case VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT: + return "VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT"; + case VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT: + return "VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT"; + 
case VK_IMAGE_CREATE_EXTENDED_USAGE_BIT: + return "VK_IMAGE_CREATE_EXTENDED_USAGE_BIT"; + case VK_IMAGE_CREATE_PROTECTED_BIT: + return "VK_IMAGE_CREATE_PROTECTED_BIT"; + case VK_IMAGE_CREATE_DISJOINT_BIT: + return "VK_IMAGE_CREATE_DISJOINT_BIT"; + case VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV: + return "VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV"; + case VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT: + return "VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT"; + case VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT: + return "VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT"; + case VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT: + return "VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT"; + case VK_IMAGE_CREATE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_BIT_EXT: + return "VK_IMAGE_CREATE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_BIT_EXT"; + case VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT: + return "VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT"; + case VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_QCOM: + return "VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_QCOM"; + case VK_IMAGE_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR: + return "VK_IMAGE_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR"; + default: + return "Unhandled VkImageCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkImageCreateFlags(VkImageCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkImageCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkImageCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSampleCountFlagBits(VkSampleCountFlagBits input_value) { + switch (input_value) { + case VK_SAMPLE_COUNT_1_BIT: + return "VK_SAMPLE_COUNT_1_BIT"; + case VK_SAMPLE_COUNT_2_BIT: + return "VK_SAMPLE_COUNT_2_BIT"; + case VK_SAMPLE_COUNT_4_BIT: + return "VK_SAMPLE_COUNT_4_BIT"; + case VK_SAMPLE_COUNT_8_BIT: + return "VK_SAMPLE_COUNT_8_BIT"; + case VK_SAMPLE_COUNT_16_BIT: + return "VK_SAMPLE_COUNT_16_BIT"; + case VK_SAMPLE_COUNT_32_BIT: + return "VK_SAMPLE_COUNT_32_BIT"; + case VK_SAMPLE_COUNT_64_BIT: + return "VK_SAMPLE_COUNT_64_BIT"; + default: + return "Unhandled VkSampleCountFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSampleCountFlags(VkSampleCountFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSampleCountFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSampleCountFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkImageUsageFlagBits(VkImageUsageFlagBits input_value) { + switch (input_value) { + case VK_IMAGE_USAGE_TRANSFER_SRC_BIT: + return "VK_IMAGE_USAGE_TRANSFER_SRC_BIT"; + case VK_IMAGE_USAGE_TRANSFER_DST_BIT: + return "VK_IMAGE_USAGE_TRANSFER_DST_BIT"; + case VK_IMAGE_USAGE_SAMPLED_BIT: + return "VK_IMAGE_USAGE_SAMPLED_BIT"; + case VK_IMAGE_USAGE_STORAGE_BIT: + return "VK_IMAGE_USAGE_STORAGE_BIT"; + case VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT: + return "VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT"; + case VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT: + return "VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT"; + case VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT: + return "VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT"; + case VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT: + return 
"VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT"; + case VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR: + return "VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR"; + case VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR: + return "VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR"; + case VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR: + return "VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR"; + case VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT: + return "VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT"; + case VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR: + return "VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR"; + case VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT: + return "VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT"; + case VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR: + return "VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR"; + case VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR: + return "VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR"; + case VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR: + return "VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR"; + case VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT: + return "VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT"; + case VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI: + return "VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI"; + case VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM: + return "VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM"; + case VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM: + return "VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM"; + default: + return "Unhandled VkImageUsageFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkImageUsageFlags(VkImageUsageFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkImageUsageFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkImageUsageFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkInstanceCreateFlagBits(VkInstanceCreateFlagBits input_value) { + switch (input_value) { + case VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR: + return "VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR"; + default: + return "Unhandled VkInstanceCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkInstanceCreateFlags(VkInstanceCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkInstanceCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkInstanceCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkMemoryHeapFlagBits(VkMemoryHeapFlagBits input_value) { + switch (input_value) { + case VK_MEMORY_HEAP_DEVICE_LOCAL_BIT: + return "VK_MEMORY_HEAP_DEVICE_LOCAL_BIT"; + case VK_MEMORY_HEAP_MULTI_INSTANCE_BIT: + return "VK_MEMORY_HEAP_MULTI_INSTANCE_BIT"; + default: + return "Unhandled VkMemoryHeapFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkMemoryHeapFlags(VkMemoryHeapFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkMemoryHeapFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkMemoryHeapFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkMemoryPropertyFlagBits(VkMemoryPropertyFlagBits input_value) { + switch (input_value) { + case 
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT: + return "VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT"; + case VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT: + return "VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT"; + case VK_MEMORY_PROPERTY_HOST_COHERENT_BIT: + return "VK_MEMORY_PROPERTY_HOST_COHERENT_BIT"; + case VK_MEMORY_PROPERTY_HOST_CACHED_BIT: + return "VK_MEMORY_PROPERTY_HOST_CACHED_BIT"; + case VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT: + return "VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT"; + case VK_MEMORY_PROPERTY_PROTECTED_BIT: + return "VK_MEMORY_PROPERTY_PROTECTED_BIT"; + case VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD: + return "VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD"; + case VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD: + return "VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD"; + case VK_MEMORY_PROPERTY_RDMA_CAPABLE_BIT_NV: + return "VK_MEMORY_PROPERTY_RDMA_CAPABLE_BIT_NV"; + default: + return "Unhandled VkMemoryPropertyFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkMemoryPropertyFlags(VkMemoryPropertyFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkMemoryPropertyFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkMemoryPropertyFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkQueueFlagBits(VkQueueFlagBits input_value) { + switch (input_value) { + case VK_QUEUE_GRAPHICS_BIT: + return "VK_QUEUE_GRAPHICS_BIT"; + case VK_QUEUE_COMPUTE_BIT: + return "VK_QUEUE_COMPUTE_BIT"; + case VK_QUEUE_TRANSFER_BIT: + return "VK_QUEUE_TRANSFER_BIT"; + case VK_QUEUE_SPARSE_BINDING_BIT: + return "VK_QUEUE_SPARSE_BINDING_BIT"; + case VK_QUEUE_PROTECTED_BIT: + return "VK_QUEUE_PROTECTED_BIT"; + case VK_QUEUE_VIDEO_DECODE_BIT_KHR: + return "VK_QUEUE_VIDEO_DECODE_BIT_KHR"; + case VK_QUEUE_VIDEO_ENCODE_BIT_KHR: + return "VK_QUEUE_VIDEO_ENCODE_BIT_KHR"; + case VK_QUEUE_OPTICAL_FLOW_BIT_NV: + return "VK_QUEUE_OPTICAL_FLOW_BIT_NV"; + default: + return "Unhandled VkQueueFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkQueueFlags(VkQueueFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkQueueFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkQueueFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDeviceQueueCreateFlagBits(VkDeviceQueueCreateFlagBits input_value) { + switch (input_value) { + case VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT: + return "VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT"; + default: + return "Unhandled VkDeviceQueueCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkDeviceQueueCreateFlags(VkDeviceQueueCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDeviceQueueCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDeviceQueueCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPipelineStageFlagBits(VkPipelineStageFlagBits input_value) { + switch (input_value) { + case VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT: + return "VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT"; + case VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT: + return 
"VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT"; + case VK_PIPELINE_STAGE_VERTEX_INPUT_BIT: + return "VK_PIPELINE_STAGE_VERTEX_INPUT_BIT"; + case VK_PIPELINE_STAGE_VERTEX_SHADER_BIT: + return "VK_PIPELINE_STAGE_VERTEX_SHADER_BIT"; + case VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT: + return "VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT"; + case VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT: + return "VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT"; + case VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT: + return "VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT"; + case VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT: + return "VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT"; + case VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT: + return "VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT"; + case VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT: + return "VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT"; + case VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT: + return "VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT"; + case VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT: + return "VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT"; + case VK_PIPELINE_STAGE_TRANSFER_BIT: + return "VK_PIPELINE_STAGE_TRANSFER_BIT"; + case VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT: + return "VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT"; + case VK_PIPELINE_STAGE_HOST_BIT: + return "VK_PIPELINE_STAGE_HOST_BIT"; + case VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT: + return "VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT"; + case VK_PIPELINE_STAGE_ALL_COMMANDS_BIT: + return "VK_PIPELINE_STAGE_ALL_COMMANDS_BIT"; + case VK_PIPELINE_STAGE_NONE: + return "VK_PIPELINE_STAGE_NONE"; + case VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT: + return "VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT"; + case VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT: + return "VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT"; + case VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR: + return "VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR"; + case VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR: + return "VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR"; + case VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT: + return "VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT"; + case VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR: + return "VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR"; + case VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV: + return "VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV"; + case VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT: + return "VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT"; + case VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT: + return "VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT"; + default: + return "Unhandled VkPipelineStageFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPipelineStageFlags(VkPipelineStageFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPipelineStageFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPipelineStageFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkMemoryMapFlagBits(VkMemoryMapFlagBits input_value) { + switch (input_value) { + case VK_MEMORY_MAP_PLACED_BIT_EXT: + return "VK_MEMORY_MAP_PLACED_BIT_EXT"; + default: + return "Unhandled VkMemoryMapFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkMemoryMapFlags(VkMemoryMapFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( 
!ret.empty()) ret.append("|"); + ret.append(string_VkMemoryMapFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkMemoryMapFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSparseMemoryBindFlagBits(VkSparseMemoryBindFlagBits input_value) { + switch (input_value) { + case VK_SPARSE_MEMORY_BIND_METADATA_BIT: + return "VK_SPARSE_MEMORY_BIND_METADATA_BIT"; + default: + return "Unhandled VkSparseMemoryBindFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSparseMemoryBindFlags(VkSparseMemoryBindFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSparseMemoryBindFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSparseMemoryBindFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSparseImageFormatFlagBits(VkSparseImageFormatFlagBits input_value) { + switch (input_value) { + case VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT: + return "VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT"; + case VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT: + return "VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT"; + case VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT: + return "VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT"; + default: + return "Unhandled VkSparseImageFormatFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSparseImageFormatFlags(VkSparseImageFormatFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSparseImageFormatFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSparseImageFormatFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkFenceCreateFlagBits(VkFenceCreateFlagBits input_value) { + switch (input_value) { + case VK_FENCE_CREATE_SIGNALED_BIT: + return "VK_FENCE_CREATE_SIGNALED_BIT"; + default: + return "Unhandled VkFenceCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkFenceCreateFlags(VkFenceCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkFenceCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkFenceCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkEventCreateFlagBits(VkEventCreateFlagBits input_value) { + switch (input_value) { + case VK_EVENT_CREATE_DEVICE_ONLY_BIT: + return "VK_EVENT_CREATE_DEVICE_ONLY_BIT"; + default: + return "Unhandled VkEventCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkEventCreateFlags(VkEventCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkEventCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkEventCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkQueryPipelineStatisticFlagBits(VkQueryPipelineStatisticFlagBits input_value) { + switch (input_value) { + case 
VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT: + return "VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT"; + case VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT: + return "VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT"; + case VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT: + return "VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT"; + case VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT: + return "VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT"; + case VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT: + return "VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT"; + case VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT: + return "VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT"; + case VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT: + return "VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT"; + case VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT: + return "VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT"; + case VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT: + return "VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT"; + case VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT: + return "VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT"; + case VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT: + return "VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT"; + case VK_QUERY_PIPELINE_STATISTIC_TASK_SHADER_INVOCATIONS_BIT_EXT: + return "VK_QUERY_PIPELINE_STATISTIC_TASK_SHADER_INVOCATIONS_BIT_EXT"; + case VK_QUERY_PIPELINE_STATISTIC_MESH_SHADER_INVOCATIONS_BIT_EXT: + return "VK_QUERY_PIPELINE_STATISTIC_MESH_SHADER_INVOCATIONS_BIT_EXT"; + case VK_QUERY_PIPELINE_STATISTIC_CLUSTER_CULLING_SHADER_INVOCATIONS_BIT_HUAWEI: + return "VK_QUERY_PIPELINE_STATISTIC_CLUSTER_CULLING_SHADER_INVOCATIONS_BIT_HUAWEI"; + default: + return "Unhandled VkQueryPipelineStatisticFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkQueryPipelineStatisticFlags(VkQueryPipelineStatisticFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkQueryPipelineStatisticFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkQueryPipelineStatisticFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkQueryResultFlagBits(VkQueryResultFlagBits input_value) { + switch (input_value) { + case VK_QUERY_RESULT_64_BIT: + return "VK_QUERY_RESULT_64_BIT"; + case VK_QUERY_RESULT_WAIT_BIT: + return "VK_QUERY_RESULT_WAIT_BIT"; + case VK_QUERY_RESULT_WITH_AVAILABILITY_BIT: + return "VK_QUERY_RESULT_WITH_AVAILABILITY_BIT"; + case VK_QUERY_RESULT_PARTIAL_BIT: + return "VK_QUERY_RESULT_PARTIAL_BIT"; + case VK_QUERY_RESULT_WITH_STATUS_BIT_KHR: + return "VK_QUERY_RESULT_WITH_STATUS_BIT_KHR"; + default: + return "Unhandled VkQueryResultFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkQueryResultFlags(VkQueryResultFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkQueryResultFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkQueryResultFlags(0)"); + return ret; +} +#endif // __cplusplus +static 
inline const char* string_VkBufferCreateFlagBits(VkBufferCreateFlagBits input_value) { + switch (input_value) { + case VK_BUFFER_CREATE_SPARSE_BINDING_BIT: + return "VK_BUFFER_CREATE_SPARSE_BINDING_BIT"; + case VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT: + return "VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT"; + case VK_BUFFER_CREATE_SPARSE_ALIASED_BIT: + return "VK_BUFFER_CREATE_SPARSE_ALIASED_BIT"; + case VK_BUFFER_CREATE_PROTECTED_BIT: + return "VK_BUFFER_CREATE_PROTECTED_BIT"; + case VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT: + return "VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT"; + case VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT: + return "VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT"; + case VK_BUFFER_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR: + return "VK_BUFFER_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR"; + default: + return "Unhandled VkBufferCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkBufferCreateFlags(VkBufferCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkBufferCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkBufferCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkBufferUsageFlagBits(VkBufferUsageFlagBits input_value) { + switch (input_value) { + case VK_BUFFER_USAGE_TRANSFER_SRC_BIT: + return "VK_BUFFER_USAGE_TRANSFER_SRC_BIT"; + case VK_BUFFER_USAGE_TRANSFER_DST_BIT: + return "VK_BUFFER_USAGE_TRANSFER_DST_BIT"; + case VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT: + return "VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT"; + case VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT: + return "VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT"; + case VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT: + return "VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT"; + case VK_BUFFER_USAGE_STORAGE_BUFFER_BIT: + return "VK_BUFFER_USAGE_STORAGE_BUFFER_BIT"; + case VK_BUFFER_USAGE_INDEX_BUFFER_BIT: + return "VK_BUFFER_USAGE_INDEX_BUFFER_BIT"; + case VK_BUFFER_USAGE_VERTEX_BUFFER_BIT: + return "VK_BUFFER_USAGE_VERTEX_BUFFER_BIT"; + case VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT: + return "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT"; + case VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT: + return "VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT"; + case VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR: + return "VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR"; + case VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR: + return "VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR"; + case VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT: + return "VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT"; + case VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT: + return "VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT"; + case VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT: + return "VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + case VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX: + return "VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX"; +#endif // VK_ENABLE_BETA_EXTENSIONS + case VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR: + return "VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR"; + case VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR: + return "VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR"; + case VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR: + return 
"VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR"; + case VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR: + return "VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR"; + case VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR: + return "VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR"; + case VK_BUFFER_USAGE_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT: + return "VK_BUFFER_USAGE_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT"; + case VK_BUFFER_USAGE_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT: + return "VK_BUFFER_USAGE_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT"; + case VK_BUFFER_USAGE_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT: + return "VK_BUFFER_USAGE_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT"; + case VK_BUFFER_USAGE_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT: + return "VK_BUFFER_USAGE_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT"; + case VK_BUFFER_USAGE_MICROMAP_STORAGE_BIT_EXT: + return "VK_BUFFER_USAGE_MICROMAP_STORAGE_BIT_EXT"; + default: + return "Unhandled VkBufferUsageFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkBufferUsageFlags(VkBufferUsageFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkBufferUsageFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkBufferUsageFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkImageViewCreateFlagBits(VkImageViewCreateFlagBits input_value) { + switch (input_value) { + case VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT: + return "VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT"; + case VK_IMAGE_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT: + return "VK_IMAGE_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT"; + case VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT: + return "VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT"; + default: + return "Unhandled VkImageViewCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkImageViewCreateFlags(VkImageViewCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkImageViewCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkImageViewCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPipelineCacheCreateFlagBits(VkPipelineCacheCreateFlagBits input_value) { + switch (input_value) { + case VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT: + return "VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT"; + default: + return "Unhandled VkPipelineCacheCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPipelineCacheCreateFlags(VkPipelineCacheCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPipelineCacheCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPipelineCacheCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkColorComponentFlagBits(VkColorComponentFlagBits input_value) { + switch (input_value) { + case VK_COLOR_COMPONENT_R_BIT: + return "VK_COLOR_COMPONENT_R_BIT"; + case VK_COLOR_COMPONENT_G_BIT: + return "VK_COLOR_COMPONENT_G_BIT"; + case VK_COLOR_COMPONENT_B_BIT: + return 
"VK_COLOR_COMPONENT_B_BIT"; + case VK_COLOR_COMPONENT_A_BIT: + return "VK_COLOR_COMPONENT_A_BIT"; + default: + return "Unhandled VkColorComponentFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkColorComponentFlags(VkColorComponentFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkColorComponentFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkColorComponentFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPipelineCreateFlagBits(VkPipelineCreateFlagBits input_value) { + switch (input_value) { + case VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT: + return "VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT"; + case VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT: + return "VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT"; + case VK_PIPELINE_CREATE_DERIVATIVE_BIT: + return "VK_PIPELINE_CREATE_DERIVATIVE_BIT"; + case VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT: + return "VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT"; + case VK_PIPELINE_CREATE_DISPATCH_BASE_BIT: + return "VK_PIPELINE_CREATE_DISPATCH_BASE_BIT"; + case VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT: + return "VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT"; + case VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT: + return "VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT"; + case VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR: + return "VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR"; + case VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR: + return "VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR"; + case VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR: + return "VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR"; + case VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR: + return "VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR"; + case VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR: + return "VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR"; + case VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR: + return "VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR"; + case VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR: + return "VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR"; + case VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV: + return "VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV"; + case VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT: + return "VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT"; + case VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR: + return "VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR"; + case VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR: + return "VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR"; + case VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR: + return "VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR"; + case VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV: + return "VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV"; + case VK_PIPELINE_CREATE_LIBRARY_BIT_KHR: + return "VK_PIPELINE_CREATE_LIBRARY_BIT_KHR"; + case VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT: + return "VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT"; + case 
VK_PIPELINE_CREATE_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT: + return "VK_PIPELINE_CREATE_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT"; + case VK_PIPELINE_CREATE_LINK_TIME_OPTIMIZATION_BIT_EXT: + return "VK_PIPELINE_CREATE_LINK_TIME_OPTIMIZATION_BIT_EXT"; + case VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV: + return "VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV"; + case VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT: + return "VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT"; + case VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT: + return "VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT"; + case VK_PIPELINE_CREATE_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT: + return "VK_PIPELINE_CREATE_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + case VK_PIPELINE_CREATE_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV: + return "VK_PIPELINE_CREATE_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV"; +#endif // VK_ENABLE_BETA_EXTENSIONS + case VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT: + return "VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT"; + case VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT: + return "VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT"; + default: + return "Unhandled VkPipelineCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPipelineCreateFlags(VkPipelineCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPipelineCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPipelineCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPipelineShaderStageCreateFlagBits(VkPipelineShaderStageCreateFlagBits input_value) { + switch (input_value) { + case VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT: + return "VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT"; + case VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT: + return "VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT"; + default: + return "Unhandled VkPipelineShaderStageCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPipelineShaderStageCreateFlags(VkPipelineShaderStageCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPipelineShaderStageCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPipelineShaderStageCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkShaderStageFlagBits(VkShaderStageFlagBits input_value) { + switch (input_value) { + case VK_SHADER_STAGE_VERTEX_BIT: + return "VK_SHADER_STAGE_VERTEX_BIT"; + case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT: + return "VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT"; + case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT: + return "VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT"; + case VK_SHADER_STAGE_GEOMETRY_BIT: + return "VK_SHADER_STAGE_GEOMETRY_BIT"; + case VK_SHADER_STAGE_FRAGMENT_BIT: + return "VK_SHADER_STAGE_FRAGMENT_BIT"; + case VK_SHADER_STAGE_COMPUTE_BIT: + return "VK_SHADER_STAGE_COMPUTE_BIT"; + case VK_SHADER_STAGE_RAYGEN_BIT_KHR: + return "VK_SHADER_STAGE_RAYGEN_BIT_KHR"; + case VK_SHADER_STAGE_ANY_HIT_BIT_KHR: + return "VK_SHADER_STAGE_ANY_HIT_BIT_KHR"; 
+ case VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR: + return "VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR"; + case VK_SHADER_STAGE_MISS_BIT_KHR: + return "VK_SHADER_STAGE_MISS_BIT_KHR"; + case VK_SHADER_STAGE_INTERSECTION_BIT_KHR: + return "VK_SHADER_STAGE_INTERSECTION_BIT_KHR"; + case VK_SHADER_STAGE_CALLABLE_BIT_KHR: + return "VK_SHADER_STAGE_CALLABLE_BIT_KHR"; + case VK_SHADER_STAGE_TASK_BIT_EXT: + return "VK_SHADER_STAGE_TASK_BIT_EXT"; + case VK_SHADER_STAGE_MESH_BIT_EXT: + return "VK_SHADER_STAGE_MESH_BIT_EXT"; + case VK_SHADER_STAGE_SUBPASS_SHADING_BIT_HUAWEI: + return "VK_SHADER_STAGE_SUBPASS_SHADING_BIT_HUAWEI"; + case VK_SHADER_STAGE_CLUSTER_CULLING_BIT_HUAWEI: + return "VK_SHADER_STAGE_CLUSTER_CULLING_BIT_HUAWEI"; + default: + return "Unhandled VkShaderStageFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkShaderStageFlags(VkShaderStageFlags input_value) { + if (input_value == VK_SHADER_STAGE_ALL_GRAPHICS) { return "VK_SHADER_STAGE_ALL_GRAPHICS"; } + if (input_value == VK_SHADER_STAGE_ALL) { return "VK_SHADER_STAGE_ALL"; } + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkShaderStageFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkShaderStageFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkCullModeFlagBits(VkCullModeFlagBits input_value) { + switch (input_value) { + case VK_CULL_MODE_NONE: + return "VK_CULL_MODE_NONE"; + case VK_CULL_MODE_FRONT_BIT: + return "VK_CULL_MODE_FRONT_BIT"; + case VK_CULL_MODE_BACK_BIT: + return "VK_CULL_MODE_BACK_BIT"; + default: + return "Unhandled VkCullModeFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkCullModeFlags(VkCullModeFlags input_value) { + if (input_value == VK_CULL_MODE_FRONT_AND_BACK) { return "VK_CULL_MODE_FRONT_AND_BACK"; } + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkCullModeFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkCullModeFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPipelineDepthStencilStateCreateFlagBits(VkPipelineDepthStencilStateCreateFlagBits input_value) { + switch (input_value) { + case VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT: + return "VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT"; + case VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT: + return "VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT"; + default: + return "Unhandled VkPipelineDepthStencilStateCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPipelineDepthStencilStateCreateFlags(VkPipelineDepthStencilStateCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPipelineDepthStencilStateCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPipelineDepthStencilStateCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPipelineColorBlendStateCreateFlagBits(VkPipelineColorBlendStateCreateFlagBits input_value) { + 
switch (input_value) { + case VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT: + return "VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT"; + default: + return "Unhandled VkPipelineColorBlendStateCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPipelineColorBlendStateCreateFlags(VkPipelineColorBlendStateCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPipelineColorBlendStateCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPipelineColorBlendStateCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPipelineLayoutCreateFlagBits(VkPipelineLayoutCreateFlagBits input_value) { + switch (input_value) { + case VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT: + return "VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT"; + default: + return "Unhandled VkPipelineLayoutCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPipelineLayoutCreateFlags(VkPipelineLayoutCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPipelineLayoutCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPipelineLayoutCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSamplerCreateFlagBits(VkSamplerCreateFlagBits input_value) { + switch (input_value) { + case VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT: + return "VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT"; + case VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT: + return "VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT"; + case VK_SAMPLER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT: + return "VK_SAMPLER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT"; + case VK_SAMPLER_CREATE_NON_SEAMLESS_CUBE_MAP_BIT_EXT: + return "VK_SAMPLER_CREATE_NON_SEAMLESS_CUBE_MAP_BIT_EXT"; + case VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM: + return "VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM"; + default: + return "Unhandled VkSamplerCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSamplerCreateFlags(VkSamplerCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSamplerCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSamplerCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDescriptorPoolCreateFlagBits(VkDescriptorPoolCreateFlagBits input_value) { + switch (input_value) { + case VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT: + return "VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT"; + case VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT: + return "VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT"; + case VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT: + return "VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT"; + case VK_DESCRIPTOR_POOL_CREATE_ALLOW_OVERALLOCATION_SETS_BIT_NV: + return "VK_DESCRIPTOR_POOL_CREATE_ALLOW_OVERALLOCATION_SETS_BIT_NV"; + case VK_DESCRIPTOR_POOL_CREATE_ALLOW_OVERALLOCATION_POOLS_BIT_NV: + return 
"VK_DESCRIPTOR_POOL_CREATE_ALLOW_OVERALLOCATION_POOLS_BIT_NV"; + default: + return "Unhandled VkDescriptorPoolCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkDescriptorPoolCreateFlags(VkDescriptorPoolCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDescriptorPoolCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDescriptorPoolCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDescriptorSetLayoutCreateFlagBits(VkDescriptorSetLayoutCreateFlagBits input_value) { + switch (input_value) { + case VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT: + return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT"; + case VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR: + return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR"; + case VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT: + return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT"; + case VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT: + return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT"; + case VK_DESCRIPTOR_SET_LAYOUT_CREATE_INDIRECT_BINDABLE_BIT_NV: + return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_INDIRECT_BINDABLE_BIT_NV"; + case VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT: + return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT"; + case VK_DESCRIPTOR_SET_LAYOUT_CREATE_PER_STAGE_BIT_NV: + return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_PER_STAGE_BIT_NV"; + default: + return "Unhandled VkDescriptorSetLayoutCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkDescriptorSetLayoutCreateFlags(VkDescriptorSetLayoutCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDescriptorSetLayoutCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDescriptorSetLayoutCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkAttachmentDescriptionFlagBits(VkAttachmentDescriptionFlagBits input_value) { + switch (input_value) { + case VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT: + return "VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT"; + default: + return "Unhandled VkAttachmentDescriptionFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkAttachmentDescriptionFlags(VkAttachmentDescriptionFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkAttachmentDescriptionFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkAttachmentDescriptionFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDependencyFlagBits(VkDependencyFlagBits input_value) { + switch (input_value) { + case VK_DEPENDENCY_BY_REGION_BIT: + return "VK_DEPENDENCY_BY_REGION_BIT"; + case VK_DEPENDENCY_DEVICE_GROUP_BIT: + return "VK_DEPENDENCY_DEVICE_GROUP_BIT"; + case VK_DEPENDENCY_VIEW_LOCAL_BIT: + return "VK_DEPENDENCY_VIEW_LOCAL_BIT"; + case VK_DEPENDENCY_FEEDBACK_LOOP_BIT_EXT: + return "VK_DEPENDENCY_FEEDBACK_LOOP_BIT_EXT"; + default: + return "Unhandled VkDependencyFlagBits"; + } +} 
+ +#ifdef __cplusplus +static inline std::string string_VkDependencyFlags(VkDependencyFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDependencyFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDependencyFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkFramebufferCreateFlagBits(VkFramebufferCreateFlagBits input_value) { + switch (input_value) { + case VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT: + return "VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT"; + default: + return "Unhandled VkFramebufferCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkFramebufferCreateFlags(VkFramebufferCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkFramebufferCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkFramebufferCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkRenderPassCreateFlagBits(VkRenderPassCreateFlagBits input_value) { + switch (input_value) { + case VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM: + return "VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM"; + default: + return "Unhandled VkRenderPassCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkRenderPassCreateFlags(VkRenderPassCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkRenderPassCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkRenderPassCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSubpassDescriptionFlagBits(VkSubpassDescriptionFlagBits input_value) { + switch (input_value) { + case VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX: + return "VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX"; + case VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX: + return "VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX"; + case VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM: + return "VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM"; + case VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM: + return "VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM"; + case VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT: + return "VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT"; + case VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT: + return "VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT"; + case VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT: + return "VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT"; + case VK_SUBPASS_DESCRIPTION_ENABLE_LEGACY_DITHERING_BIT_EXT: + return "VK_SUBPASS_DESCRIPTION_ENABLE_LEGACY_DITHERING_BIT_EXT"; + default: + return "Unhandled VkSubpassDescriptionFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSubpassDescriptionFlags(VkSubpassDescriptionFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + 
ret.append(string_VkSubpassDescriptionFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSubpassDescriptionFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkCommandPoolCreateFlagBits(VkCommandPoolCreateFlagBits input_value) { + switch (input_value) { + case VK_COMMAND_POOL_CREATE_TRANSIENT_BIT: + return "VK_COMMAND_POOL_CREATE_TRANSIENT_BIT"; + case VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT: + return "VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT"; + case VK_COMMAND_POOL_CREATE_PROTECTED_BIT: + return "VK_COMMAND_POOL_CREATE_PROTECTED_BIT"; + default: + return "Unhandled VkCommandPoolCreateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkCommandPoolCreateFlags(VkCommandPoolCreateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkCommandPoolCreateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkCommandPoolCreateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkCommandPoolResetFlagBits(VkCommandPoolResetFlagBits input_value) { + switch (input_value) { + case VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT: + return "VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT"; + default: + return "Unhandled VkCommandPoolResetFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkCommandPoolResetFlags(VkCommandPoolResetFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkCommandPoolResetFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkCommandPoolResetFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkCommandBufferUsageFlagBits(VkCommandBufferUsageFlagBits input_value) { + switch (input_value) { + case VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT: + return "VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT"; + case VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT: + return "VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT"; + case VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT: + return "VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT"; + default: + return "Unhandled VkCommandBufferUsageFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkCommandBufferUsageFlags(VkCommandBufferUsageFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkCommandBufferUsageFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkCommandBufferUsageFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkQueryControlFlagBits(VkQueryControlFlagBits input_value) { + switch (input_value) { + case VK_QUERY_CONTROL_PRECISE_BIT: + return "VK_QUERY_CONTROL_PRECISE_BIT"; + default: + return "Unhandled VkQueryControlFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkQueryControlFlags(VkQueryControlFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkQueryControlFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if 
(ret.empty()) ret.append("VkQueryControlFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkCommandBufferResetFlagBits(VkCommandBufferResetFlagBits input_value) { + switch (input_value) { + case VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT: + return "VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT"; + default: + return "Unhandled VkCommandBufferResetFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkCommandBufferResetFlags(VkCommandBufferResetFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkCommandBufferResetFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkCommandBufferResetFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkStencilFaceFlagBits(VkStencilFaceFlagBits input_value) { + switch (input_value) { + case VK_STENCIL_FACE_FRONT_BIT: + return "VK_STENCIL_FACE_FRONT_BIT"; + case VK_STENCIL_FACE_BACK_BIT: + return "VK_STENCIL_FACE_BACK_BIT"; + default: + return "Unhandled VkStencilFaceFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkStencilFaceFlags(VkStencilFaceFlags input_value) { + if (input_value == VK_STENCIL_FACE_FRONT_AND_BACK) { return "VK_STENCIL_FACE_FRONT_AND_BACK"; } + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkStencilFaceFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkStencilFaceFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSubgroupFeatureFlagBits(VkSubgroupFeatureFlagBits input_value) { + switch (input_value) { + case VK_SUBGROUP_FEATURE_BASIC_BIT: + return "VK_SUBGROUP_FEATURE_BASIC_BIT"; + case VK_SUBGROUP_FEATURE_VOTE_BIT: + return "VK_SUBGROUP_FEATURE_VOTE_BIT"; + case VK_SUBGROUP_FEATURE_ARITHMETIC_BIT: + return "VK_SUBGROUP_FEATURE_ARITHMETIC_BIT"; + case VK_SUBGROUP_FEATURE_BALLOT_BIT: + return "VK_SUBGROUP_FEATURE_BALLOT_BIT"; + case VK_SUBGROUP_FEATURE_SHUFFLE_BIT: + return "VK_SUBGROUP_FEATURE_SHUFFLE_BIT"; + case VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT: + return "VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT"; + case VK_SUBGROUP_FEATURE_CLUSTERED_BIT: + return "VK_SUBGROUP_FEATURE_CLUSTERED_BIT"; + case VK_SUBGROUP_FEATURE_QUAD_BIT: + return "VK_SUBGROUP_FEATURE_QUAD_BIT"; + case VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV: + return "VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV"; + case VK_SUBGROUP_FEATURE_ROTATE_BIT_KHR: + return "VK_SUBGROUP_FEATURE_ROTATE_BIT_KHR"; + case VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT_KHR: + return "VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT_KHR"; + default: + return "Unhandled VkSubgroupFeatureFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSubgroupFeatureFlags(VkSubgroupFeatureFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSubgroupFeatureFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSubgroupFeatureFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPeerMemoryFeatureFlagBits(VkPeerMemoryFeatureFlagBits input_value) { + switch (input_value) { + case VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT: + return 
"VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT"; + case VK_PEER_MEMORY_FEATURE_COPY_DST_BIT: + return "VK_PEER_MEMORY_FEATURE_COPY_DST_BIT"; + case VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT: + return "VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT"; + case VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT: + return "VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT"; + default: + return "Unhandled VkPeerMemoryFeatureFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPeerMemoryFeatureFlags(VkPeerMemoryFeatureFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPeerMemoryFeatureFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPeerMemoryFeatureFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkMemoryAllocateFlagBits(VkMemoryAllocateFlagBits input_value) { + switch (input_value) { + case VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT: + return "VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT"; + case VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT: + return "VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT"; + case VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT: + return "VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT"; + default: + return "Unhandled VkMemoryAllocateFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkMemoryAllocateFlags(VkMemoryAllocateFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkMemoryAllocateFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkMemoryAllocateFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkExternalMemoryHandleTypeFlagBits(VkExternalMemoryHandleTypeFlagBits input_value) { + switch (input_value) { + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV: + return 
"VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_SCREEN_BUFFER_BIT_QNX: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_SCREEN_BUFFER_BIT_QNX"; + default: + return "Unhandled VkExternalMemoryHandleTypeFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkExternalMemoryHandleTypeFlags(VkExternalMemoryHandleTypeFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkExternalMemoryHandleTypeFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkExternalMemoryHandleTypeFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkExternalMemoryFeatureFlagBits(VkExternalMemoryFeatureFlagBits input_value) { + switch (input_value) { + case VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT: + return "VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT"; + case VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT: + return "VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT"; + case VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT: + return "VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT"; + default: + return "Unhandled VkExternalMemoryFeatureFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkExternalMemoryFeatureFlags(VkExternalMemoryFeatureFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkExternalMemoryFeatureFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkExternalMemoryFeatureFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkExternalFenceHandleTypeFlagBits(VkExternalFenceHandleTypeFlagBits input_value) { + switch (input_value) { + case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT: + return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT"; + case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT: + return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT"; + case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT: + return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT"; + case VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT: + return "VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT"; + default: + return "Unhandled VkExternalFenceHandleTypeFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkExternalFenceHandleTypeFlags(VkExternalFenceHandleTypeFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkExternalFenceHandleTypeFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkExternalFenceHandleTypeFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkExternalFenceFeatureFlagBits(VkExternalFenceFeatureFlagBits input_value) { + switch (input_value) { + case VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT: + return "VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT"; + case VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT: + return "VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT"; + default: + return "Unhandled VkExternalFenceFeatureFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkExternalFenceFeatureFlags(VkExternalFenceFeatureFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) 
ret.append("|"); + ret.append(string_VkExternalFenceFeatureFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkExternalFenceFeatureFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkFenceImportFlagBits(VkFenceImportFlagBits input_value) { + switch (input_value) { + case VK_FENCE_IMPORT_TEMPORARY_BIT: + return "VK_FENCE_IMPORT_TEMPORARY_BIT"; + default: + return "Unhandled VkFenceImportFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkFenceImportFlags(VkFenceImportFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkFenceImportFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkFenceImportFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSemaphoreImportFlagBits(VkSemaphoreImportFlagBits input_value) { + switch (input_value) { + case VK_SEMAPHORE_IMPORT_TEMPORARY_BIT: + return "VK_SEMAPHORE_IMPORT_TEMPORARY_BIT"; + default: + return "Unhandled VkSemaphoreImportFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSemaphoreImportFlags(VkSemaphoreImportFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSemaphoreImportFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSemaphoreImportFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkExternalSemaphoreHandleTypeFlagBits(VkExternalSemaphoreHandleTypeFlagBits input_value) { + switch (input_value) { + case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT: + return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT"; + case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT: + return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT"; + case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT: + return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT"; + case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT: + return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT"; + case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT: + return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT"; + case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA: + return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA"; + default: + return "Unhandled VkExternalSemaphoreHandleTypeFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkExternalSemaphoreHandleTypeFlags(VkExternalSemaphoreHandleTypeFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkExternalSemaphoreHandleTypeFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkExternalSemaphoreHandleTypeFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkExternalSemaphoreFeatureFlagBits(VkExternalSemaphoreFeatureFlagBits input_value) { + switch (input_value) { + case VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT: + return "VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT"; + case VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT: + return "VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT"; + default: + return "Unhandled 
VkExternalSemaphoreFeatureFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkExternalSemaphoreFeatureFlags(VkExternalSemaphoreFeatureFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkExternalSemaphoreFeatureFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkExternalSemaphoreFeatureFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkResolveModeFlagBits(VkResolveModeFlagBits input_value) { + switch (input_value) { + case VK_RESOLVE_MODE_NONE: + return "VK_RESOLVE_MODE_NONE"; + case VK_RESOLVE_MODE_SAMPLE_ZERO_BIT: + return "VK_RESOLVE_MODE_SAMPLE_ZERO_BIT"; + case VK_RESOLVE_MODE_AVERAGE_BIT: + return "VK_RESOLVE_MODE_AVERAGE_BIT"; + case VK_RESOLVE_MODE_MIN_BIT: + return "VK_RESOLVE_MODE_MIN_BIT"; + case VK_RESOLVE_MODE_MAX_BIT: + return "VK_RESOLVE_MODE_MAX_BIT"; + case VK_RESOLVE_MODE_EXTERNAL_FORMAT_DOWNSAMPLE_ANDROID: + return "VK_RESOLVE_MODE_EXTERNAL_FORMAT_DOWNSAMPLE_ANDROID"; + default: + return "Unhandled VkResolveModeFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkResolveModeFlags(VkResolveModeFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkResolveModeFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkResolveModeFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDescriptorBindingFlagBits(VkDescriptorBindingFlagBits input_value) { + switch (input_value) { + case VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT: + return "VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT"; + case VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT: + return "VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT"; + case VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT: + return "VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT"; + case VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT: + return "VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT"; + default: + return "Unhandled VkDescriptorBindingFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkDescriptorBindingFlags(VkDescriptorBindingFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDescriptorBindingFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDescriptorBindingFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSemaphoreWaitFlagBits(VkSemaphoreWaitFlagBits input_value) { + switch (input_value) { + case VK_SEMAPHORE_WAIT_ANY_BIT: + return "VK_SEMAPHORE_WAIT_ANY_BIT"; + default: + return "Unhandled VkSemaphoreWaitFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSemaphoreWaitFlags(VkSemaphoreWaitFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSemaphoreWaitFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSemaphoreWaitFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* 
string_VkPipelineCreationFeedbackFlagBits(VkPipelineCreationFeedbackFlagBits input_value) { + switch (input_value) { + case VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT: + return "VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT"; + case VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT: + return "VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT"; + case VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT: + return "VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT"; + default: + return "Unhandled VkPipelineCreationFeedbackFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPipelineCreationFeedbackFlags(VkPipelineCreationFeedbackFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPipelineCreationFeedbackFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPipelineCreationFeedbackFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkToolPurposeFlagBits(VkToolPurposeFlagBits input_value) { + switch (input_value) { + case VK_TOOL_PURPOSE_VALIDATION_BIT: + return "VK_TOOL_PURPOSE_VALIDATION_BIT"; + case VK_TOOL_PURPOSE_PROFILING_BIT: + return "VK_TOOL_PURPOSE_PROFILING_BIT"; + case VK_TOOL_PURPOSE_TRACING_BIT: + return "VK_TOOL_PURPOSE_TRACING_BIT"; + case VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT: + return "VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT"; + case VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT: + return "VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT"; + case VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT: + return "VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT"; + case VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT: + return "VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT"; + default: + return "Unhandled VkToolPurposeFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkToolPurposeFlags(VkToolPurposeFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkToolPurposeFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkToolPurposeFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPipelineStageFlagBits2(uint64_t input_value) { + if (input_value == VK_PIPELINE_STAGE_2_NONE) return "VK_PIPELINE_STAGE_2_NONE"; + if (input_value == VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT) return "VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT) return "VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT) return "VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT) return "VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT) return "VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT) return "VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT) return "VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT) return "VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT) return "VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT"; + if (input_value 
== VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT) return "VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT) return "VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT) return "VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT) return "VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT) return "VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_HOST_BIT) return "VK_PIPELINE_STAGE_2_HOST_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT) return "VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT) return "VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_COPY_BIT) return "VK_PIPELINE_STAGE_2_COPY_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_RESOLVE_BIT) return "VK_PIPELINE_STAGE_2_RESOLVE_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_BLIT_BIT) return "VK_PIPELINE_STAGE_2_BLIT_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_CLEAR_BIT) return "VK_PIPELINE_STAGE_2_CLEAR_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT) return "VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT) return "VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT) return "VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT"; + if (input_value == VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR) return "VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR"; + if (input_value == VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR) return "VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR"; + if (input_value == VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT) return "VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT"; + if (input_value == VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT) return "VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT"; + if (input_value == VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV) return "VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV"; + if (input_value == VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR) return "VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR"; + if (input_value == VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR) return "VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR"; + if (input_value == VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR) return "VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR"; + if (input_value == VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT) return "VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT"; + if (input_value == VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT) return "VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT"; + if (input_value == VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT) return "VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT"; + if (input_value == VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI) return "VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI"; + if (input_value == VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI) return "VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI"; + if (input_value == VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR) return "VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR"; + if (input_value == VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT) return "VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT"; + if (input_value == 
VK_PIPELINE_STAGE_2_CLUSTER_CULLING_SHADER_BIT_HUAWEI) return "VK_PIPELINE_STAGE_2_CLUSTER_CULLING_SHADER_BIT_HUAWEI"; + if (input_value == VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV) return "VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV"; + return "Unhandled VkPipelineStageFlagBits2"; +} + +#ifdef __cplusplus +static inline std::string string_VkPipelineStageFlags2(VkPipelineStageFlags2 input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPipelineStageFlagBits2(static_cast(1ULL << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPipelineStageFlags2(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkAccessFlagBits2(uint64_t input_value) { + if (input_value == VK_ACCESS_2_NONE) return "VK_ACCESS_2_NONE"; + if (input_value == VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT) return "VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT"; + if (input_value == VK_ACCESS_2_INDEX_READ_BIT) return "VK_ACCESS_2_INDEX_READ_BIT"; + if (input_value == VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT) return "VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT"; + if (input_value == VK_ACCESS_2_UNIFORM_READ_BIT) return "VK_ACCESS_2_UNIFORM_READ_BIT"; + if (input_value == VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT) return "VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT"; + if (input_value == VK_ACCESS_2_SHADER_READ_BIT) return "VK_ACCESS_2_SHADER_READ_BIT"; + if (input_value == VK_ACCESS_2_SHADER_WRITE_BIT) return "VK_ACCESS_2_SHADER_WRITE_BIT"; + if (input_value == VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT) return "VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT"; + if (input_value == VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT) return "VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT"; + if (input_value == VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT) return "VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT"; + if (input_value == VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT) return "VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT"; + if (input_value == VK_ACCESS_2_TRANSFER_READ_BIT) return "VK_ACCESS_2_TRANSFER_READ_BIT"; + if (input_value == VK_ACCESS_2_TRANSFER_WRITE_BIT) return "VK_ACCESS_2_TRANSFER_WRITE_BIT"; + if (input_value == VK_ACCESS_2_HOST_READ_BIT) return "VK_ACCESS_2_HOST_READ_BIT"; + if (input_value == VK_ACCESS_2_HOST_WRITE_BIT) return "VK_ACCESS_2_HOST_WRITE_BIT"; + if (input_value == VK_ACCESS_2_MEMORY_READ_BIT) return "VK_ACCESS_2_MEMORY_READ_BIT"; + if (input_value == VK_ACCESS_2_MEMORY_WRITE_BIT) return "VK_ACCESS_2_MEMORY_WRITE_BIT"; + if (input_value == VK_ACCESS_2_SHADER_SAMPLED_READ_BIT) return "VK_ACCESS_2_SHADER_SAMPLED_READ_BIT"; + if (input_value == VK_ACCESS_2_SHADER_STORAGE_READ_BIT) return "VK_ACCESS_2_SHADER_STORAGE_READ_BIT"; + if (input_value == VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT) return "VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT"; + if (input_value == VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR) return "VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR"; + if (input_value == VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR) return "VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR"; + if (input_value == VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR) return "VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR"; + if (input_value == VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR) return "VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR"; + if (input_value == VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT) return "VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT"; + if (input_value == VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT) return 
"VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT"; + if (input_value == VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT) return "VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT"; + if (input_value == VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT) return "VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT"; + if (input_value == VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV) return "VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV"; + if (input_value == VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV) return "VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV"; + if (input_value == VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR) return "VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR"; + if (input_value == VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR) return "VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR"; + if (input_value == VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR) return "VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR"; + if (input_value == VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT) return "VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT"; + if (input_value == VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT) return "VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT"; + if (input_value == VK_ACCESS_2_DESCRIPTOR_BUFFER_READ_BIT_EXT) return "VK_ACCESS_2_DESCRIPTOR_BUFFER_READ_BIT_EXT"; + if (input_value == VK_ACCESS_2_INVOCATION_MASK_READ_BIT_HUAWEI) return "VK_ACCESS_2_INVOCATION_MASK_READ_BIT_HUAWEI"; + if (input_value == VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR) return "VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR"; + if (input_value == VK_ACCESS_2_MICROMAP_READ_BIT_EXT) return "VK_ACCESS_2_MICROMAP_READ_BIT_EXT"; + if (input_value == VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT) return "VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT"; + if (input_value == VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV) return "VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV"; + if (input_value == VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV) return "VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV"; + return "Unhandled VkAccessFlagBits2"; +} + +#ifdef __cplusplus +static inline std::string string_VkAccessFlags2(VkAccessFlags2 input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkAccessFlagBits2(static_cast(1ULL << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkAccessFlags2(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSubmitFlagBits(VkSubmitFlagBits input_value) { + switch (input_value) { + case VK_SUBMIT_PROTECTED_BIT: + return "VK_SUBMIT_PROTECTED_BIT"; + default: + return "Unhandled VkSubmitFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSubmitFlags(VkSubmitFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSubmitFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSubmitFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkRenderingFlagBits(VkRenderingFlagBits input_value) { + switch (input_value) { + case VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT: + return "VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT"; + case VK_RENDERING_SUSPENDING_BIT: + return "VK_RENDERING_SUSPENDING_BIT"; + case VK_RENDERING_RESUMING_BIT: + return "VK_RENDERING_RESUMING_BIT"; + case 
VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT: + return "VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT"; + case VK_RENDERING_CONTENTS_INLINE_BIT_KHR: + return "VK_RENDERING_CONTENTS_INLINE_BIT_KHR"; + default: + return "Unhandled VkRenderingFlagBits"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkRenderingFlags(VkRenderingFlags input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkRenderingFlagBits(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkRenderingFlags(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkFormatFeatureFlagBits2(uint64_t input_value) { + if (input_value == VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT) return "VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT) return "VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT) return "VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT) return "VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT) return "VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT) return "VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT) return "VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT) return "VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT) return "VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT) return "VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_BLIT_SRC_BIT) return "VK_FORMAT_FEATURE_2_BLIT_SRC_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_BLIT_DST_BIT) return "VK_FORMAT_FEATURE_2_BLIT_DST_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT) return "VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT) return "VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT) return "VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT) return "VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT) return "VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT) return "VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT) return "VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT) return "VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT) return "VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT"; + if (input_value == 
VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT) return "VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_DISJOINT_BIT) return "VK_FORMAT_FEATURE_2_DISJOINT_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT) return "VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT) return "VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT) return "VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT) return "VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT"; + if (input_value == VK_FORMAT_FEATURE_2_VIDEO_DECODE_OUTPUT_BIT_KHR) return "VK_FORMAT_FEATURE_2_VIDEO_DECODE_OUTPUT_BIT_KHR"; + if (input_value == VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_KHR) return "VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_KHR"; + if (input_value == VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR) return "VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR"; + if (input_value == VK_FORMAT_FEATURE_2_FRAGMENT_DENSITY_MAP_BIT_EXT) return "VK_FORMAT_FEATURE_2_FRAGMENT_DENSITY_MAP_BIT_EXT"; + if (input_value == VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR) return "VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR"; + if (input_value == VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT_EXT) return "VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT_EXT"; + if (input_value == VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR) return "VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR"; + if (input_value == VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR) return "VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR"; + if (input_value == VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV) return "VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV"; + if (input_value == VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM) return "VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM"; + if (input_value == VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM) return "VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM"; + if (input_value == VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM) return "VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM"; + if (input_value == VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM) return "VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM"; + if (input_value == VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV) return "VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV"; + if (input_value == VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV) return "VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV"; + if (input_value == VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV) return "VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV"; + return "Unhandled VkFormatFeatureFlagBits2"; +} + +#ifdef __cplusplus +static inline std::string string_VkFormatFeatureFlags2(VkFormatFeatureFlags2 input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkFormatFeatureFlagBits2(static_cast(1ULL << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkFormatFeatureFlags2(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSurfaceTransformFlagBitsKHR(VkSurfaceTransformFlagBitsKHR input_value) { + switch (input_value) { + 
case VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR: + return "VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR"; + case VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR: + return "VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR"; + case VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR: + return "VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR"; + case VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR: + return "VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR"; + case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR: + return "VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR"; + case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR: + return "VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR"; + case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR: + return "VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR"; + case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR: + return "VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR"; + case VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR: + return "VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR"; + default: + return "Unhandled VkSurfaceTransformFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSurfaceTransformFlagsKHR(VkSurfaceTransformFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSurfaceTransformFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSurfaceTransformFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkCompositeAlphaFlagBitsKHR(VkCompositeAlphaFlagBitsKHR input_value) { + switch (input_value) { + case VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR: + return "VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR"; + case VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR: + return "VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR"; + case VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR: + return "VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR"; + case VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR: + return "VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR"; + default: + return "Unhandled VkCompositeAlphaFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkCompositeAlphaFlagsKHR(VkCompositeAlphaFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkCompositeAlphaFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkCompositeAlphaFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSwapchainCreateFlagBitsKHR(VkSwapchainCreateFlagBitsKHR input_value) { + switch (input_value) { + case VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR: + return "VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR"; + case VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR: + return "VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR"; + case VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR: + return "VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR"; + case VK_SWAPCHAIN_CREATE_DEFERRED_MEMORY_ALLOCATION_BIT_EXT: + return "VK_SWAPCHAIN_CREATE_DEFERRED_MEMORY_ALLOCATION_BIT_EXT"; + default: + return "Unhandled VkSwapchainCreateFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSwapchainCreateFlagsKHR(VkSwapchainCreateFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + 
ret.append(string_VkSwapchainCreateFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSwapchainCreateFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDeviceGroupPresentModeFlagBitsKHR(VkDeviceGroupPresentModeFlagBitsKHR input_value) { + switch (input_value) { + case VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR: + return "VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR"; + case VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR: + return "VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR"; + case VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR: + return "VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR"; + case VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR: + return "VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR"; + default: + return "Unhandled VkDeviceGroupPresentModeFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkDeviceGroupPresentModeFlagsKHR(VkDeviceGroupPresentModeFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDeviceGroupPresentModeFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDeviceGroupPresentModeFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDisplayPlaneAlphaFlagBitsKHR(VkDisplayPlaneAlphaFlagBitsKHR input_value) { + switch (input_value) { + case VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR: + return "VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR"; + case VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR: + return "VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR"; + case VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR: + return "VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR"; + case VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR: + return "VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR"; + default: + return "Unhandled VkDisplayPlaneAlphaFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkDisplayPlaneAlphaFlagsKHR(VkDisplayPlaneAlphaFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDisplayPlaneAlphaFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDisplayPlaneAlphaFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoCodecOperationFlagBitsKHR(VkVideoCodecOperationFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_CODEC_OPERATION_NONE_KHR: + return "VK_VIDEO_CODEC_OPERATION_NONE_KHR"; + case VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_KHR: + return "VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_KHR"; + case VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_KHR: + return "VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_KHR"; + case VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR: + return "VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR"; + case VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR: + return "VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR"; + case VK_VIDEO_CODEC_OPERATION_DECODE_AV1_BIT_KHR: + return "VK_VIDEO_CODEC_OPERATION_DECODE_AV1_BIT_KHR"; + default: + return "Unhandled VkVideoCodecOperationFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoCodecOperationFlagsKHR(VkVideoCodecOperationFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( 
!ret.empty()) ret.append("|"); + ret.append(string_VkVideoCodecOperationFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoCodecOperationFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoChromaSubsamplingFlagBitsKHR(VkVideoChromaSubsamplingFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_KHR: + return "VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_KHR"; + case VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR: + return "VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR"; + case VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR: + return "VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR"; + case VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR: + return "VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR"; + case VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR: + return "VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR"; + default: + return "Unhandled VkVideoChromaSubsamplingFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoChromaSubsamplingFlagsKHR(VkVideoChromaSubsamplingFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoChromaSubsamplingFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoChromaSubsamplingFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoComponentBitDepthFlagBitsKHR(VkVideoComponentBitDepthFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR: + return "VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR"; + case VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR: + return "VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR"; + case VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR: + return "VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR"; + case VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR: + return "VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR"; + default: + return "Unhandled VkVideoComponentBitDepthFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoComponentBitDepthFlagsKHR(VkVideoComponentBitDepthFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoComponentBitDepthFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoComponentBitDepthFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoCapabilityFlagBitsKHR(VkVideoCapabilityFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_CAPABILITY_PROTECTED_CONTENT_BIT_KHR: + return "VK_VIDEO_CAPABILITY_PROTECTED_CONTENT_BIT_KHR"; + case VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR: + return "VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR"; + default: + return "Unhandled VkVideoCapabilityFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoCapabilityFlagsKHR(VkVideoCapabilityFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoCapabilityFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoCapabilityFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* 
string_VkVideoSessionCreateFlagBitsKHR(VkVideoSessionCreateFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR: + return "VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR"; + case VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_PARAMETER_OPTIMIZATIONS_BIT_KHR: + return "VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_PARAMETER_OPTIMIZATIONS_BIT_KHR"; + case VK_VIDEO_SESSION_CREATE_INLINE_QUERIES_BIT_KHR: + return "VK_VIDEO_SESSION_CREATE_INLINE_QUERIES_BIT_KHR"; + default: + return "Unhandled VkVideoSessionCreateFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoSessionCreateFlagsKHR(VkVideoSessionCreateFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoSessionCreateFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoSessionCreateFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoCodingControlFlagBitsKHR(VkVideoCodingControlFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR: + return "VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR"; + case VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_BIT_KHR: + return "VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_BIT_KHR"; + case VK_VIDEO_CODING_CONTROL_ENCODE_QUALITY_LEVEL_BIT_KHR: + return "VK_VIDEO_CODING_CONTROL_ENCODE_QUALITY_LEVEL_BIT_KHR"; + default: + return "Unhandled VkVideoCodingControlFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoCodingControlFlagsKHR(VkVideoCodingControlFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoCodingControlFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoCodingControlFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoDecodeCapabilityFlagBitsKHR(VkVideoDecodeCapabilityFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_COINCIDE_BIT_KHR: + return "VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_COINCIDE_BIT_KHR"; + case VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_DISTINCT_BIT_KHR: + return "VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_DISTINCT_BIT_KHR"; + default: + return "Unhandled VkVideoDecodeCapabilityFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoDecodeCapabilityFlagsKHR(VkVideoDecodeCapabilityFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoDecodeCapabilityFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoDecodeCapabilityFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoDecodeUsageFlagBitsKHR(VkVideoDecodeUsageFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_DECODE_USAGE_DEFAULT_KHR: + return "VK_VIDEO_DECODE_USAGE_DEFAULT_KHR"; + case VK_VIDEO_DECODE_USAGE_TRANSCODING_BIT_KHR: + return "VK_VIDEO_DECODE_USAGE_TRANSCODING_BIT_KHR"; + case VK_VIDEO_DECODE_USAGE_OFFLINE_BIT_KHR: + return "VK_VIDEO_DECODE_USAGE_OFFLINE_BIT_KHR"; + case VK_VIDEO_DECODE_USAGE_STREAMING_BIT_KHR: + return 
"VK_VIDEO_DECODE_USAGE_STREAMING_BIT_KHR"; + default: + return "Unhandled VkVideoDecodeUsageFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoDecodeUsageFlagsKHR(VkVideoDecodeUsageFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoDecodeUsageFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoDecodeUsageFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeH264CapabilityFlagBitsKHR(VkVideoEncodeH264CapabilityFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_H264_CAPABILITY_HRD_COMPLIANCE_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_CAPABILITY_HRD_COMPLIANCE_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_CAPABILITY_PREDICTION_WEIGHT_TABLE_GENERATED_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_CAPABILITY_PREDICTION_WEIGHT_TABLE_GENERATED_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_CAPABILITY_ROW_UNALIGNED_SLICE_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_CAPABILITY_ROW_UNALIGNED_SLICE_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L0_LIST_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L0_LIST_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_CAPABILITY_PER_SLICE_CONSTANT_QP_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_CAPABILITY_PER_SLICE_CONSTANT_QP_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_CAPABILITY_GENERATE_PREFIX_NALU_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_CAPABILITY_GENERATE_PREFIX_NALU_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeH264CapabilityFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeH264CapabilityFlagsKHR(VkVideoEncodeH264CapabilityFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeH264CapabilityFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeH264CapabilityFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeH264StdFlagBitsKHR(VkVideoEncodeH264StdFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_H264_STD_SEPARATE_COLOR_PLANE_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_SEPARATE_COLOR_PLANE_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_QPPRIME_Y_ZERO_TRANSFORM_BYPASS_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_QPPRIME_Y_ZERO_TRANSFORM_BYPASS_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_SCALING_MATRIX_PRESENT_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_SCALING_MATRIX_PRESENT_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_CHROMA_QP_INDEX_OFFSET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_CHROMA_QP_INDEX_OFFSET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_SECOND_CHROMA_QP_INDEX_OFFSET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_SECOND_CHROMA_QP_INDEX_OFFSET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_PIC_INIT_QP_MINUS26_BIT_KHR: 
+ return "VK_VIDEO_ENCODE_H264_STD_PIC_INIT_QP_MINUS26_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_WEIGHTED_PRED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_WEIGHTED_PRED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_WEIGHTED_BIPRED_IDC_EXPLICIT_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_WEIGHTED_BIPRED_IDC_EXPLICIT_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_WEIGHTED_BIPRED_IDC_IMPLICIT_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_WEIGHTED_BIPRED_IDC_IMPLICIT_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_TRANSFORM_8X8_MODE_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_TRANSFORM_8X8_MODE_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_DIRECT_SPATIAL_MV_PRED_FLAG_UNSET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_DIRECT_SPATIAL_MV_PRED_FLAG_UNSET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_ENTROPY_CODING_MODE_FLAG_UNSET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_ENTROPY_CODING_MODE_FLAG_UNSET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_ENTROPY_CODING_MODE_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_ENTROPY_CODING_MODE_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_DIRECT_8X8_INFERENCE_FLAG_UNSET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_DIRECT_8X8_INFERENCE_FLAG_UNSET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_CONSTRAINED_INTRA_PRED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_CONSTRAINED_INTRA_PRED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_DISABLED_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_DISABLED_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_ENABLED_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_ENABLED_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_PARTIAL_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_PARTIAL_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_SLICE_QP_DELTA_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_SLICE_QP_DELTA_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_STD_DIFFERENT_SLICE_QP_DELTA_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_STD_DIFFERENT_SLICE_QP_DELTA_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeH264StdFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeH264StdFlagsKHR(VkVideoEncodeH264StdFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeH264StdFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeH264StdFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeH264RateControlFlagBitsKHR(VkVideoEncodeH264RateControlFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_H264_RATE_CONTROL_ATTEMPT_HRD_COMPLIANCE_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_RATE_CONTROL_ATTEMPT_HRD_COMPLIANCE_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_RATE_CONTROL_REGULAR_GOP_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_RATE_CONTROL_REGULAR_GOP_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR"; + case VK_VIDEO_ENCODE_H264_RATE_CONTROL_TEMPORAL_LAYER_PATTERN_DYADIC_BIT_KHR: + return "VK_VIDEO_ENCODE_H264_RATE_CONTROL_TEMPORAL_LAYER_PATTERN_DYADIC_BIT_KHR"; + default: + return "Unhandled 
VkVideoEncodeH264RateControlFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeH264RateControlFlagsKHR(VkVideoEncodeH264RateControlFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeH264RateControlFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeH264RateControlFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeH265CapabilityFlagBitsKHR(VkVideoEncodeH265CapabilityFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_H265_CAPABILITY_HRD_COMPLIANCE_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CAPABILITY_HRD_COMPLIANCE_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CAPABILITY_PREDICTION_WEIGHT_TABLE_GENERATED_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CAPABILITY_PREDICTION_WEIGHT_TABLE_GENERATED_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CAPABILITY_ROW_UNALIGNED_SLICE_SEGMENT_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CAPABILITY_ROW_UNALIGNED_SLICE_SEGMENT_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CAPABILITY_DIFFERENT_SLICE_SEGMENT_TYPE_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CAPABILITY_DIFFERENT_SLICE_SEGMENT_TYPE_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L0_LIST_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L0_LIST_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CAPABILITY_PER_SLICE_SEGMENT_CONSTANT_QP_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CAPABILITY_PER_SLICE_SEGMENT_CONSTANT_QP_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILES_PER_SLICE_SEGMENT_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILES_PER_SLICE_SEGMENT_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_SEGMENTS_PER_TILE_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_SEGMENTS_PER_TILE_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeH265CapabilityFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeH265CapabilityFlagsKHR(VkVideoEncodeH265CapabilityFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeH265CapabilityFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeH265CapabilityFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeH265StdFlagBitsKHR(VkVideoEncodeH265StdFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_H265_STD_SEPARATE_COLOR_PLANE_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_SEPARATE_COLOR_PLANE_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_SAMPLE_ADAPTIVE_OFFSET_ENABLED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_SAMPLE_ADAPTIVE_OFFSET_ENABLED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_SCALING_LIST_DATA_PRESENT_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_SCALING_LIST_DATA_PRESENT_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_PCM_ENABLED_FLAG_SET_BIT_KHR: + return 
"VK_VIDEO_ENCODE_H265_STD_PCM_ENABLED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_SPS_TEMPORAL_MVP_ENABLED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_SPS_TEMPORAL_MVP_ENABLED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_INIT_QP_MINUS26_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_INIT_QP_MINUS26_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_WEIGHTED_PRED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_WEIGHTED_PRED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_WEIGHTED_BIPRED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_WEIGHTED_BIPRED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_LOG2_PARALLEL_MERGE_LEVEL_MINUS2_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_LOG2_PARALLEL_MERGE_LEVEL_MINUS2_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_SIGN_DATA_HIDING_ENABLED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_SIGN_DATA_HIDING_ENABLED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_TRANSFORM_SKIP_ENABLED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_TRANSFORM_SKIP_ENABLED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_TRANSFORM_SKIP_ENABLED_FLAG_UNSET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_TRANSFORM_SKIP_ENABLED_FLAG_UNSET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_TRANSQUANT_BYPASS_ENABLED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_TRANSQUANT_BYPASS_ENABLED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_CONSTRAINED_INTRA_PRED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_CONSTRAINED_INTRA_PRED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_ENTROPY_CODING_SYNC_ENABLED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_ENTROPY_CODING_SYNC_ENABLED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_DEPENDENT_SLICE_SEGMENTS_ENABLED_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_DEPENDENT_SLICE_SEGMENTS_ENABLED_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_DEPENDENT_SLICE_SEGMENT_FLAG_SET_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_DEPENDENT_SLICE_SEGMENT_FLAG_SET_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_SLICE_QP_DELTA_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_SLICE_QP_DELTA_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_STD_DIFFERENT_SLICE_QP_DELTA_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_STD_DIFFERENT_SLICE_QP_DELTA_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeH265StdFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeH265StdFlagsKHR(VkVideoEncodeH265StdFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeH265StdFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeH265StdFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeH265CtbSizeFlagBitsKHR(VkVideoEncodeH265CtbSizeFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_H265_CTB_SIZE_16_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CTB_SIZE_16_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_KHR: + return 
"VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeH265CtbSizeFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeH265CtbSizeFlagsKHR(VkVideoEncodeH265CtbSizeFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeH265CtbSizeFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeH265CtbSizeFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeH265TransformBlockSizeFlagBitsKHR(VkVideoEncodeH265TransformBlockSizeFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_4_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_4_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_8_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_8_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_16_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_16_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_32_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_32_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeH265TransformBlockSizeFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeH265TransformBlockSizeFlagsKHR(VkVideoEncodeH265TransformBlockSizeFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeH265TransformBlockSizeFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeH265TransformBlockSizeFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeH265RateControlFlagBitsKHR(VkVideoEncodeH265RateControlFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_H265_RATE_CONTROL_ATTEMPT_HRD_COMPLIANCE_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_RATE_CONTROL_ATTEMPT_HRD_COMPLIANCE_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_RATE_CONTROL_REGULAR_GOP_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_RATE_CONTROL_REGULAR_GOP_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR"; + case VK_VIDEO_ENCODE_H265_RATE_CONTROL_TEMPORAL_SUB_LAYER_PATTERN_DYADIC_BIT_KHR: + return "VK_VIDEO_ENCODE_H265_RATE_CONTROL_TEMPORAL_SUB_LAYER_PATTERN_DYADIC_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeH265RateControlFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeH265RateControlFlagsKHR(VkVideoEncodeH265RateControlFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeH265RateControlFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeH265RateControlFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoDecodeH264PictureLayoutFlagBitsKHR(VkVideoDecodeH264PictureLayoutFlagBitsKHR input_value) { + switch (input_value) 
{ + case VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_PROGRESSIVE_KHR: + return "VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_PROGRESSIVE_KHR"; + case VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_INTERLEAVED_LINES_BIT_KHR: + return "VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_INTERLEAVED_LINES_BIT_KHR"; + case VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_SEPARATE_PLANES_BIT_KHR: + return "VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_SEPARATE_PLANES_BIT_KHR"; + default: + return "Unhandled VkVideoDecodeH264PictureLayoutFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoDecodeH264PictureLayoutFlagsKHR(VkVideoDecodeH264PictureLayoutFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoDecodeH264PictureLayoutFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoDecodeH264PictureLayoutFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPerformanceCounterDescriptionFlagBitsKHR(VkPerformanceCounterDescriptionFlagBitsKHR input_value) { + switch (input_value) { + case VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR: + return "VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR"; + case VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR: + return "VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR"; + default: + return "Unhandled VkPerformanceCounterDescriptionFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPerformanceCounterDescriptionFlagsKHR(VkPerformanceCounterDescriptionFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPerformanceCounterDescriptionFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPerformanceCounterDescriptionFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkMemoryUnmapFlagBitsKHR(VkMemoryUnmapFlagBitsKHR input_value) { + switch (input_value) { + case VK_MEMORY_UNMAP_RESERVE_BIT_EXT: + return "VK_MEMORY_UNMAP_RESERVE_BIT_EXT"; + default: + return "Unhandled VkMemoryUnmapFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkMemoryUnmapFlagsKHR(VkMemoryUnmapFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkMemoryUnmapFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkMemoryUnmapFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeCapabilityFlagBitsKHR(VkVideoEncodeCapabilityFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR: + return "VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR"; + case VK_VIDEO_ENCODE_CAPABILITY_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_DETECTION_BIT_KHR: + return "VK_VIDEO_ENCODE_CAPABILITY_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_DETECTION_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeCapabilityFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeCapabilityFlagsKHR(VkVideoEncodeCapabilityFlagsKHR input_value) { + std::string ret; + 
int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeCapabilityFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeCapabilityFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeRateControlModeFlagBitsKHR(VkVideoEncodeRateControlModeFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DEFAULT_KHR: + return "VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DEFAULT_KHR"; + case VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DISABLED_BIT_KHR: + return "VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DISABLED_BIT_KHR"; + case VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR: + return "VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR"; + case VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR: + return "VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeRateControlModeFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeRateControlModeFlagsKHR(VkVideoEncodeRateControlModeFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeRateControlModeFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeRateControlModeFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeFeedbackFlagBitsKHR(VkVideoEncodeFeedbackFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BUFFER_OFFSET_BIT_KHR: + return "VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BUFFER_OFFSET_BIT_KHR"; + case VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BYTES_WRITTEN_BIT_KHR: + return "VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BYTES_WRITTEN_BIT_KHR"; + case VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_HAS_OVERRIDES_BIT_KHR: + return "VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_HAS_OVERRIDES_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeFeedbackFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeFeedbackFlagsKHR(VkVideoEncodeFeedbackFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeFeedbackFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeFeedbackFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeUsageFlagBitsKHR(VkVideoEncodeUsageFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_USAGE_DEFAULT_KHR: + return "VK_VIDEO_ENCODE_USAGE_DEFAULT_KHR"; + case VK_VIDEO_ENCODE_USAGE_TRANSCODING_BIT_KHR: + return "VK_VIDEO_ENCODE_USAGE_TRANSCODING_BIT_KHR"; + case VK_VIDEO_ENCODE_USAGE_STREAMING_BIT_KHR: + return "VK_VIDEO_ENCODE_USAGE_STREAMING_BIT_KHR"; + case VK_VIDEO_ENCODE_USAGE_RECORDING_BIT_KHR: + return "VK_VIDEO_ENCODE_USAGE_RECORDING_BIT_KHR"; + case VK_VIDEO_ENCODE_USAGE_CONFERENCING_BIT_KHR: + return "VK_VIDEO_ENCODE_USAGE_CONFERENCING_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeUsageFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeUsageFlagsKHR(VkVideoEncodeUsageFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( 
!ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeUsageFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeUsageFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkVideoEncodeContentFlagBitsKHR(VkVideoEncodeContentFlagBitsKHR input_value) { + switch (input_value) { + case VK_VIDEO_ENCODE_CONTENT_DEFAULT_KHR: + return "VK_VIDEO_ENCODE_CONTENT_DEFAULT_KHR"; + case VK_VIDEO_ENCODE_CONTENT_CAMERA_BIT_KHR: + return "VK_VIDEO_ENCODE_CONTENT_CAMERA_BIT_KHR"; + case VK_VIDEO_ENCODE_CONTENT_DESKTOP_BIT_KHR: + return "VK_VIDEO_ENCODE_CONTENT_DESKTOP_BIT_KHR"; + case VK_VIDEO_ENCODE_CONTENT_RENDERED_BIT_KHR: + return "VK_VIDEO_ENCODE_CONTENT_RENDERED_BIT_KHR"; + default: + return "Unhandled VkVideoEncodeContentFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkVideoEncodeContentFlagsKHR(VkVideoEncodeContentFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkVideoEncodeContentFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkVideoEncodeContentFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPipelineCreateFlagBits2KHR(uint64_t input_value) { + if (input_value == VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR) return "VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR) return "VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR) return "VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + //if (input_value == VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX) return "VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX"; +#endif // VK_ENABLE_BETA_EXTENSIONS + if (input_value == VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT) return "VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT"; + if (input_value == VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR) return "VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR) return "VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_NV) return "VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_NV"; + if (input_value == VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR) return "VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR) return "VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_KHR) return "VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT_KHR) return "VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_EXT) return "VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_EXT"; + if (input_value == VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT) return "VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT"; + if (input_value == VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR) return "VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR"; + if (input_value == 
VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR) return "VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_AABBS_BIT_KHR) return "VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_AABBS_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR) return "VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR) return "VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR) return "VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR) return "VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR) return "VK_PIPELINE_CREATE_2_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_NV) return "VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_NV"; + if (input_value == VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_NV) return "VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_NV"; + if (input_value == VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR) return "VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT) return "VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT"; + if (input_value == VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT) return "VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT"; + if (input_value == VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT) return "VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT"; + if (input_value == VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT) return "VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT"; + if (input_value == VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_EXT) return "VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_EXT"; + if (input_value == VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_EXT) return "VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_EXT"; + if (input_value == VK_PIPELINE_CREATE_2_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV) return "VK_PIPELINE_CREATE_2_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV"; + if (input_value == VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT) return "VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT"; + if (input_value == VK_PIPELINE_CREATE_2_CAPTURE_DATA_BIT_KHR) return "VK_PIPELINE_CREATE_2_CAPTURE_DATA_BIT_KHR"; + if (input_value == VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_EXT) return "VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_EXT"; + return "Unhandled VkPipelineCreateFlagBits2KHR"; +} + +#ifdef __cplusplus +static inline std::string string_VkPipelineCreateFlags2KHR(VkPipelineCreateFlags2KHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPipelineCreateFlagBits2KHR(static_cast(1ULL << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPipelineCreateFlags2KHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* 
string_VkBufferUsageFlagBits2KHR(uint64_t input_value) { + if (input_value == VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT_KHR) return "VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_TRANSFER_DST_BIT_KHR) return "VK_BUFFER_USAGE_2_TRANSFER_DST_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR) return "VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT_KHR) return "VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT_KHR) return "VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT_KHR) return "VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT_KHR) return "VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT_KHR) return "VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT_KHR) return "VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT_KHR"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + if (input_value == VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX) return "VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX"; +#endif // VK_ENABLE_BETA_EXTENSIONS + if (input_value == VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_EXT) return "VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_EXT"; + if (input_value == VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR) return "VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT) return "VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT"; + if (input_value == VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT) return "VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT"; + if (input_value == VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR) return "VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR) return "VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_VIDEO_ENCODE_DST_BIT_KHR) return "VK_BUFFER_USAGE_2_VIDEO_ENCODE_DST_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_VIDEO_ENCODE_SRC_BIT_KHR) return "VK_BUFFER_USAGE_2_VIDEO_ENCODE_SRC_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT_KHR) return "VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR) return "VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR) return "VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR"; + if (input_value == VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT) return "VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT"; + if (input_value == VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT) return "VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT"; + if (input_value == VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT) return "VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT"; + if (input_value == VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT) return "VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT"; + if (input_value == VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_EXT) return "VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_EXT"; + if (input_value == VK_BUFFER_USAGE_2_PREPROCESS_BUFFER_BIT_EXT) return 
"VK_BUFFER_USAGE_2_PREPROCESS_BUFFER_BIT_EXT"; + return "Unhandled VkBufferUsageFlagBits2KHR"; +} + +#ifdef __cplusplus +static inline std::string string_VkBufferUsageFlags2KHR(VkBufferUsageFlags2KHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkBufferUsageFlagBits2KHR(static_cast(1ULL << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkBufferUsageFlags2KHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDebugReportFlagBitsEXT(VkDebugReportFlagBitsEXT input_value) { + switch (input_value) { + case VK_DEBUG_REPORT_INFORMATION_BIT_EXT: + return "VK_DEBUG_REPORT_INFORMATION_BIT_EXT"; + case VK_DEBUG_REPORT_WARNING_BIT_EXT: + return "VK_DEBUG_REPORT_WARNING_BIT_EXT"; + case VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT: + return "VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT"; + case VK_DEBUG_REPORT_ERROR_BIT_EXT: + return "VK_DEBUG_REPORT_ERROR_BIT_EXT"; + case VK_DEBUG_REPORT_DEBUG_BIT_EXT: + return "VK_DEBUG_REPORT_DEBUG_BIT_EXT"; + default: + return "Unhandled VkDebugReportFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkDebugReportFlagsEXT(VkDebugReportFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDebugReportFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDebugReportFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkExternalMemoryHandleTypeFlagBitsNV(VkExternalMemoryHandleTypeFlagBitsNV input_value) { + switch (input_value) { + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV"; + case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV: + return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV"; + default: + return "Unhandled VkExternalMemoryHandleTypeFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkExternalMemoryHandleTypeFlagsNV(VkExternalMemoryHandleTypeFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkExternalMemoryHandleTypeFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkExternalMemoryHandleTypeFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkExternalMemoryFeatureFlagBitsNV(VkExternalMemoryFeatureFlagBitsNV input_value) { + switch (input_value) { + case VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV: + return "VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV"; + case VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV: + return "VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV"; + case VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV: + return "VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV"; + default: + return "Unhandled VkExternalMemoryFeatureFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkExternalMemoryFeatureFlagsNV(VkExternalMemoryFeatureFlagsNV input_value) { + std::string 
ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkExternalMemoryFeatureFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkExternalMemoryFeatureFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkConditionalRenderingFlagBitsEXT(VkConditionalRenderingFlagBitsEXT input_value) { + switch (input_value) { + case VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT: + return "VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT"; + default: + return "Unhandled VkConditionalRenderingFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkConditionalRenderingFlagsEXT(VkConditionalRenderingFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkConditionalRenderingFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkConditionalRenderingFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkSurfaceCounterFlagBitsEXT(VkSurfaceCounterFlagBitsEXT input_value) { + switch (input_value) { + case VK_SURFACE_COUNTER_VBLANK_BIT_EXT: + return "VK_SURFACE_COUNTER_VBLANK_BIT_EXT"; + default: + return "Unhandled VkSurfaceCounterFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkSurfaceCounterFlagsEXT(VkSurfaceCounterFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkSurfaceCounterFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkSurfaceCounterFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDebugUtilsMessageSeverityFlagBitsEXT(VkDebugUtilsMessageSeverityFlagBitsEXT input_value) { + switch (input_value) { + case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT: + return "VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT"; + case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT: + return "VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT"; + case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT: + return "VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT"; + case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT: + return "VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT"; + default: + return "Unhandled VkDebugUtilsMessageSeverityFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkDebugUtilsMessageSeverityFlagsEXT(VkDebugUtilsMessageSeverityFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDebugUtilsMessageSeverityFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDebugUtilsMessageSeverityFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDebugUtilsMessageTypeFlagBitsEXT(VkDebugUtilsMessageTypeFlagBitsEXT input_value) { + switch (input_value) { + case VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT: + return "VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT"; + case VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT: + return "VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT"; + case VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT: + return 
"VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT"; + case VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT: + return "VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT"; + default: + return "Unhandled VkDebugUtilsMessageTypeFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkDebugUtilsMessageTypeFlagsEXT(VkDebugUtilsMessageTypeFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDebugUtilsMessageTypeFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDebugUtilsMessageTypeFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkGeometryFlagBitsKHR(VkGeometryFlagBitsKHR input_value) { + switch (input_value) { + case VK_GEOMETRY_OPAQUE_BIT_KHR: + return "VK_GEOMETRY_OPAQUE_BIT_KHR"; + case VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR: + return "VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR"; + default: + return "Unhandled VkGeometryFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkGeometryFlagsKHR(VkGeometryFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkGeometryFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkGeometryFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkGeometryInstanceFlagBitsKHR(VkGeometryInstanceFlagBitsKHR input_value) { + switch (input_value) { + case VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR: + return "VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR"; + case VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR: + return "VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR"; + case VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR: + return "VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR"; + case VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR: + return "VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR"; + case VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT: + return "VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT"; + case VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT: + return "VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT"; + default: + return "Unhandled VkGeometryInstanceFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkGeometryInstanceFlagsKHR(VkGeometryInstanceFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkGeometryInstanceFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkGeometryInstanceFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkBuildAccelerationStructureFlagBitsKHR(VkBuildAccelerationStructureFlagBitsKHR input_value) { + switch (input_value) { + case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR"; + case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR"; + case VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR"; + case 
VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR"; + case VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR"; + case VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV: + return "VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV"; + case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT: + return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT"; + case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT: + return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT"; + case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT: + return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT"; +#ifdef VK_ENABLE_BETA_EXTENSIONS + case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISPLACEMENT_MICROMAP_UPDATE_NV: + return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISPLACEMENT_MICROMAP_UPDATE_NV"; +#endif // VK_ENABLE_BETA_EXTENSIONS + case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DATA_ACCESS_KHR: + return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DATA_ACCESS_KHR"; + default: + return "Unhandled VkBuildAccelerationStructureFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkBuildAccelerationStructureFlagsKHR(VkBuildAccelerationStructureFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkBuildAccelerationStructureFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkBuildAccelerationStructureFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkHostImageCopyFlagBitsEXT(VkHostImageCopyFlagBitsEXT input_value) { + switch (input_value) { + case VK_HOST_IMAGE_COPY_MEMCPY_EXT: + return "VK_HOST_IMAGE_COPY_MEMCPY_EXT"; + default: + return "Unhandled VkHostImageCopyFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkHostImageCopyFlagsEXT(VkHostImageCopyFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkHostImageCopyFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkHostImageCopyFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPresentScalingFlagBitsEXT(VkPresentScalingFlagBitsEXT input_value) { + switch (input_value) { + case VK_PRESENT_SCALING_ONE_TO_ONE_BIT_EXT: + return "VK_PRESENT_SCALING_ONE_TO_ONE_BIT_EXT"; + case VK_PRESENT_SCALING_ASPECT_RATIO_STRETCH_BIT_EXT: + return "VK_PRESENT_SCALING_ASPECT_RATIO_STRETCH_BIT_EXT"; + case VK_PRESENT_SCALING_STRETCH_BIT_EXT: + return "VK_PRESENT_SCALING_STRETCH_BIT_EXT"; + default: + return "Unhandled VkPresentScalingFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPresentScalingFlagsEXT(VkPresentScalingFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPresentScalingFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPresentScalingFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* 
string_VkPresentGravityFlagBitsEXT(VkPresentGravityFlagBitsEXT input_value) { + switch (input_value) { + case VK_PRESENT_GRAVITY_MIN_BIT_EXT: + return "VK_PRESENT_GRAVITY_MIN_BIT_EXT"; + case VK_PRESENT_GRAVITY_MAX_BIT_EXT: + return "VK_PRESENT_GRAVITY_MAX_BIT_EXT"; + case VK_PRESENT_GRAVITY_CENTERED_BIT_EXT: + return "VK_PRESENT_GRAVITY_CENTERED_BIT_EXT"; + default: + return "Unhandled VkPresentGravityFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkPresentGravityFlagsEXT(VkPresentGravityFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPresentGravityFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPresentGravityFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkIndirectStateFlagBitsNV(VkIndirectStateFlagBitsNV input_value) { + switch (input_value) { + case VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV: + return "VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV"; + default: + return "Unhandled VkIndirectStateFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkIndirectStateFlagsNV(VkIndirectStateFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkIndirectStateFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkIndirectStateFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkIndirectCommandsLayoutUsageFlagBitsNV(VkIndirectCommandsLayoutUsageFlagBitsNV input_value) { + switch (input_value) { + case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV: + return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV"; + case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV: + return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV"; + case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV: + return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV"; + default: + return "Unhandled VkIndirectCommandsLayoutUsageFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkIndirectCommandsLayoutUsageFlagsNV(VkIndirectCommandsLayoutUsageFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkIndirectCommandsLayoutUsageFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkIndirectCommandsLayoutUsageFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDeviceDiagnosticsConfigFlagBitsNV(VkDeviceDiagnosticsConfigFlagBitsNV input_value) { + switch (input_value) { + case VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV: + return "VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV"; + case VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV: + return "VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV"; + case VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV: + return "VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV"; + case VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_ERROR_REPORTING_BIT_NV: + return "VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_ERROR_REPORTING_BIT_NV"; + default: + return 
"Unhandled VkDeviceDiagnosticsConfigFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkDeviceDiagnosticsConfigFlagsNV(VkDeviceDiagnosticsConfigFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDeviceDiagnosticsConfigFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDeviceDiagnosticsConfigFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +#ifdef VK_USE_PLATFORM_METAL_EXT +static inline const char* string_VkExportMetalObjectTypeFlagBitsEXT(VkExportMetalObjectTypeFlagBitsEXT input_value) { + switch (input_value) { + case VK_EXPORT_METAL_OBJECT_TYPE_METAL_DEVICE_BIT_EXT: + return "VK_EXPORT_METAL_OBJECT_TYPE_METAL_DEVICE_BIT_EXT"; + case VK_EXPORT_METAL_OBJECT_TYPE_METAL_COMMAND_QUEUE_BIT_EXT: + return "VK_EXPORT_METAL_OBJECT_TYPE_METAL_COMMAND_QUEUE_BIT_EXT"; + case VK_EXPORT_METAL_OBJECT_TYPE_METAL_BUFFER_BIT_EXT: + return "VK_EXPORT_METAL_OBJECT_TYPE_METAL_BUFFER_BIT_EXT"; + case VK_EXPORT_METAL_OBJECT_TYPE_METAL_TEXTURE_BIT_EXT: + return "VK_EXPORT_METAL_OBJECT_TYPE_METAL_TEXTURE_BIT_EXT"; + case VK_EXPORT_METAL_OBJECT_TYPE_METAL_IOSURFACE_BIT_EXT: + return "VK_EXPORT_METAL_OBJECT_TYPE_METAL_IOSURFACE_BIT_EXT"; + case VK_EXPORT_METAL_OBJECT_TYPE_METAL_SHARED_EVENT_BIT_EXT: + return "VK_EXPORT_METAL_OBJECT_TYPE_METAL_SHARED_EVENT_BIT_EXT"; + default: + return "Unhandled VkExportMetalObjectTypeFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkExportMetalObjectTypeFlagsEXT(VkExportMetalObjectTypeFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkExportMetalObjectTypeFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkExportMetalObjectTypeFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +#endif // VK_USE_PLATFORM_METAL_EXT +static inline const char* string_VkGraphicsPipelineLibraryFlagBitsEXT(VkGraphicsPipelineLibraryFlagBitsEXT input_value) { + switch (input_value) { + case VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT: + return "VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT"; + case VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT: + return "VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT"; + case VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT: + return "VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT"; + case VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT: + return "VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT"; + default: + return "Unhandled VkGraphicsPipelineLibraryFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkGraphicsPipelineLibraryFlagsEXT(VkGraphicsPipelineLibraryFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkGraphicsPipelineLibraryFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkGraphicsPipelineLibraryFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkImageCompressionFlagBitsEXT(VkImageCompressionFlagBitsEXT input_value) { + switch (input_value) { + case VK_IMAGE_COMPRESSION_DEFAULT_EXT: + return 
"VK_IMAGE_COMPRESSION_DEFAULT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_EXPLICIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_EXPLICIT_EXT"; + case VK_IMAGE_COMPRESSION_DISABLED_EXT: + return "VK_IMAGE_COMPRESSION_DISABLED_EXT"; + default: + return "Unhandled VkImageCompressionFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkImageCompressionFlagsEXT(VkImageCompressionFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkImageCompressionFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkImageCompressionFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkImageCompressionFixedRateFlagBitsEXT(VkImageCompressionFixedRateFlagBitsEXT input_value) { + switch (input_value) { + case VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_13BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_13BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_14BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_14BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_15BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_15BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_16BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_16BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_17BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_17BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_18BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_18BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_19BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_19BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_20BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_20BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_21BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_21BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_22BPC_BIT_EXT: + return 
"VK_IMAGE_COMPRESSION_FIXED_RATE_22BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_23BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_23BPC_BIT_EXT"; + case VK_IMAGE_COMPRESSION_FIXED_RATE_24BPC_BIT_EXT: + return "VK_IMAGE_COMPRESSION_FIXED_RATE_24BPC_BIT_EXT"; + default: + return "Unhandled VkImageCompressionFixedRateFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkImageCompressionFixedRateFlagsEXT(VkImageCompressionFixedRateFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkImageCompressionFixedRateFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkImageCompressionFixedRateFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkDeviceAddressBindingFlagBitsEXT(VkDeviceAddressBindingFlagBitsEXT input_value) { + switch (input_value) { + case VK_DEVICE_ADDRESS_BINDING_INTERNAL_OBJECT_BIT_EXT: + return "VK_DEVICE_ADDRESS_BINDING_INTERNAL_OBJECT_BIT_EXT"; + default: + return "Unhandled VkDeviceAddressBindingFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkDeviceAddressBindingFlagsEXT(VkDeviceAddressBindingFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkDeviceAddressBindingFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkDeviceAddressBindingFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +#ifdef VK_USE_PLATFORM_FUCHSIA +static inline const char* string_VkImageConstraintsInfoFlagBitsFUCHSIA(VkImageConstraintsInfoFlagBitsFUCHSIA input_value) { + switch (input_value) { + case VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_RARELY_FUCHSIA: + return "VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_RARELY_FUCHSIA"; + case VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_OFTEN_FUCHSIA: + return "VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_OFTEN_FUCHSIA"; + case VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_RARELY_FUCHSIA: + return "VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_RARELY_FUCHSIA"; + case VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_OFTEN_FUCHSIA: + return "VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_OFTEN_FUCHSIA"; + case VK_IMAGE_CONSTRAINTS_INFO_PROTECTED_OPTIONAL_FUCHSIA: + return "VK_IMAGE_CONSTRAINTS_INFO_PROTECTED_OPTIONAL_FUCHSIA"; + default: + return "Unhandled VkImageConstraintsInfoFlagBitsFUCHSIA"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkImageConstraintsInfoFlagsFUCHSIA(VkImageConstraintsInfoFlagsFUCHSIA input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkImageConstraintsInfoFlagBitsFUCHSIA(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkImageConstraintsInfoFlagsFUCHSIA(0)"); + return ret; +} +#endif // __cplusplus +#endif // VK_USE_PLATFORM_FUCHSIA +static inline const char* string_VkFrameBoundaryFlagBitsEXT(VkFrameBoundaryFlagBitsEXT input_value) { + switch (input_value) { + case VK_FRAME_BOUNDARY_FRAME_END_BIT_EXT: + return "VK_FRAME_BOUNDARY_FRAME_END_BIT_EXT"; + default: + return "Unhandled VkFrameBoundaryFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkFrameBoundaryFlagsEXT(VkFrameBoundaryFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if 
(input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkFrameBoundaryFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkFrameBoundaryFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkBuildMicromapFlagBitsEXT(VkBuildMicromapFlagBitsEXT input_value) { + switch (input_value) { + case VK_BUILD_MICROMAP_PREFER_FAST_TRACE_BIT_EXT: + return "VK_BUILD_MICROMAP_PREFER_FAST_TRACE_BIT_EXT"; + case VK_BUILD_MICROMAP_PREFER_FAST_BUILD_BIT_EXT: + return "VK_BUILD_MICROMAP_PREFER_FAST_BUILD_BIT_EXT"; + case VK_BUILD_MICROMAP_ALLOW_COMPACTION_BIT_EXT: + return "VK_BUILD_MICROMAP_ALLOW_COMPACTION_BIT_EXT"; + default: + return "Unhandled VkBuildMicromapFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkBuildMicromapFlagsEXT(VkBuildMicromapFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkBuildMicromapFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkBuildMicromapFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkMicromapCreateFlagBitsEXT(VkMicromapCreateFlagBitsEXT input_value) { + switch (input_value) { + case VK_MICROMAP_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT: + return "VK_MICROMAP_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT"; + default: + return "Unhandled VkMicromapCreateFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkMicromapCreateFlagsEXT(VkMicromapCreateFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkMicromapCreateFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkMicromapCreateFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkPhysicalDeviceSchedulingControlsFlagBitsARM(uint64_t input_value) { + if (input_value == VK_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_SHADER_CORE_COUNT_ARM) return "VK_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_SHADER_CORE_COUNT_ARM"; + return "Unhandled VkPhysicalDeviceSchedulingControlsFlagBitsARM"; +} + +#ifdef __cplusplus +static inline std::string string_VkPhysicalDeviceSchedulingControlsFlagsARM(VkPhysicalDeviceSchedulingControlsFlagsARM input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkPhysicalDeviceSchedulingControlsFlagBitsARM(static_cast(1ULL << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkPhysicalDeviceSchedulingControlsFlagsARM(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkMemoryDecompressionMethodFlagBitsNV(uint64_t input_value) { + if (input_value == VK_MEMORY_DECOMPRESSION_METHOD_GDEFLATE_1_0_BIT_NV) return "VK_MEMORY_DECOMPRESSION_METHOD_GDEFLATE_1_0_BIT_NV"; + return "Unhandled VkMemoryDecompressionMethodFlagBitsNV"; +} + +#ifdef __cplusplus +static inline std::string string_VkMemoryDecompressionMethodFlagsNV(VkMemoryDecompressionMethodFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkMemoryDecompressionMethodFlagBitsNV(static_cast(1ULL << index))); + } + 
++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkMemoryDecompressionMethodFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkOpticalFlowGridSizeFlagBitsNV(VkOpticalFlowGridSizeFlagBitsNV input_value) { + switch (input_value) { + case VK_OPTICAL_FLOW_GRID_SIZE_UNKNOWN_NV: + return "VK_OPTICAL_FLOW_GRID_SIZE_UNKNOWN_NV"; + case VK_OPTICAL_FLOW_GRID_SIZE_1X1_BIT_NV: + return "VK_OPTICAL_FLOW_GRID_SIZE_1X1_BIT_NV"; + case VK_OPTICAL_FLOW_GRID_SIZE_2X2_BIT_NV: + return "VK_OPTICAL_FLOW_GRID_SIZE_2X2_BIT_NV"; + case VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV: + return "VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV"; + case VK_OPTICAL_FLOW_GRID_SIZE_8X8_BIT_NV: + return "VK_OPTICAL_FLOW_GRID_SIZE_8X8_BIT_NV"; + default: + return "Unhandled VkOpticalFlowGridSizeFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkOpticalFlowGridSizeFlagsNV(VkOpticalFlowGridSizeFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkOpticalFlowGridSizeFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkOpticalFlowGridSizeFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkOpticalFlowUsageFlagBitsNV(VkOpticalFlowUsageFlagBitsNV input_value) { + switch (input_value) { + case VK_OPTICAL_FLOW_USAGE_UNKNOWN_NV: + return "VK_OPTICAL_FLOW_USAGE_UNKNOWN_NV"; + case VK_OPTICAL_FLOW_USAGE_INPUT_BIT_NV: + return "VK_OPTICAL_FLOW_USAGE_INPUT_BIT_NV"; + case VK_OPTICAL_FLOW_USAGE_OUTPUT_BIT_NV: + return "VK_OPTICAL_FLOW_USAGE_OUTPUT_BIT_NV"; + case VK_OPTICAL_FLOW_USAGE_HINT_BIT_NV: + return "VK_OPTICAL_FLOW_USAGE_HINT_BIT_NV"; + case VK_OPTICAL_FLOW_USAGE_COST_BIT_NV: + return "VK_OPTICAL_FLOW_USAGE_COST_BIT_NV"; + case VK_OPTICAL_FLOW_USAGE_GLOBAL_FLOW_BIT_NV: + return "VK_OPTICAL_FLOW_USAGE_GLOBAL_FLOW_BIT_NV"; + default: + return "Unhandled VkOpticalFlowUsageFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkOpticalFlowUsageFlagsNV(VkOpticalFlowUsageFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkOpticalFlowUsageFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkOpticalFlowUsageFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkOpticalFlowSessionCreateFlagBitsNV(VkOpticalFlowSessionCreateFlagBitsNV input_value) { + switch (input_value) { + case VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_HINT_BIT_NV: + return "VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_HINT_BIT_NV"; + case VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_COST_BIT_NV: + return "VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_COST_BIT_NV"; + case VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_GLOBAL_FLOW_BIT_NV: + return "VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_GLOBAL_FLOW_BIT_NV"; + case VK_OPTICAL_FLOW_SESSION_CREATE_ALLOW_REGIONS_BIT_NV: + return "VK_OPTICAL_FLOW_SESSION_CREATE_ALLOW_REGIONS_BIT_NV"; + case VK_OPTICAL_FLOW_SESSION_CREATE_BOTH_DIRECTIONS_BIT_NV: + return "VK_OPTICAL_FLOW_SESSION_CREATE_BOTH_DIRECTIONS_BIT_NV"; + default: + return "Unhandled VkOpticalFlowSessionCreateFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkOpticalFlowSessionCreateFlagsNV(VkOpticalFlowSessionCreateFlagsNV input_value) { + std::string ret; + int index = 0; + 
while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkOpticalFlowSessionCreateFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkOpticalFlowSessionCreateFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkOpticalFlowExecuteFlagBitsNV(VkOpticalFlowExecuteFlagBitsNV input_value) { + switch (input_value) { + case VK_OPTICAL_FLOW_EXECUTE_DISABLE_TEMPORAL_HINTS_BIT_NV: + return "VK_OPTICAL_FLOW_EXECUTE_DISABLE_TEMPORAL_HINTS_BIT_NV"; + default: + return "Unhandled VkOpticalFlowExecuteFlagBitsNV"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkOpticalFlowExecuteFlagsNV(VkOpticalFlowExecuteFlagsNV input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkOpticalFlowExecuteFlagBitsNV(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkOpticalFlowExecuteFlagsNV(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkShaderCreateFlagBitsEXT(VkShaderCreateFlagBitsEXT input_value) { + switch (input_value) { + case VK_SHADER_CREATE_LINK_STAGE_BIT_EXT: + return "VK_SHADER_CREATE_LINK_STAGE_BIT_EXT"; + case VK_SHADER_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT: + return "VK_SHADER_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT"; + case VK_SHADER_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT: + return "VK_SHADER_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT"; + case VK_SHADER_CREATE_NO_TASK_SHADER_BIT_EXT: + return "VK_SHADER_CREATE_NO_TASK_SHADER_BIT_EXT"; + case VK_SHADER_CREATE_DISPATCH_BASE_BIT_EXT: + return "VK_SHADER_CREATE_DISPATCH_BASE_BIT_EXT"; + case VK_SHADER_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_EXT: + return "VK_SHADER_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_EXT"; + case VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT: + return "VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT"; + case VK_SHADER_CREATE_INDIRECT_BINDABLE_BIT_EXT: + return "VK_SHADER_CREATE_INDIRECT_BINDABLE_BIT_EXT"; + default: + return "Unhandled VkShaderCreateFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkShaderCreateFlagsEXT(VkShaderCreateFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkShaderCreateFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkShaderCreateFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkIndirectCommandsInputModeFlagBitsEXT(VkIndirectCommandsInputModeFlagBitsEXT input_value) { + switch (input_value) { + case VK_INDIRECT_COMMANDS_INPUT_MODE_VULKAN_INDEX_BUFFER_EXT: + return "VK_INDIRECT_COMMANDS_INPUT_MODE_VULKAN_INDEX_BUFFER_EXT"; + case VK_INDIRECT_COMMANDS_INPUT_MODE_DXGI_INDEX_BUFFER_EXT: + return "VK_INDIRECT_COMMANDS_INPUT_MODE_DXGI_INDEX_BUFFER_EXT"; + default: + return "Unhandled VkIndirectCommandsInputModeFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkIndirectCommandsInputModeFlagsEXT(VkIndirectCommandsInputModeFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkIndirectCommandsInputModeFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 
1; + } + if (ret.empty()) ret.append("VkIndirectCommandsInputModeFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkIndirectCommandsLayoutUsageFlagBitsEXT(VkIndirectCommandsLayoutUsageFlagBitsEXT input_value) { + switch (input_value) { + case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_EXT: + return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_EXT"; + case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_EXT: + return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_EXT"; + default: + return "Unhandled VkIndirectCommandsLayoutUsageFlagBitsEXT"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkIndirectCommandsLayoutUsageFlagsEXT(VkIndirectCommandsLayoutUsageFlagsEXT input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkIndirectCommandsLayoutUsageFlagBitsEXT(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkIndirectCommandsLayoutUsageFlagsEXT(0)"); + return ret; +} +#endif // __cplusplus +static inline const char* string_VkAccelerationStructureCreateFlagBitsKHR(VkAccelerationStructureCreateFlagBitsKHR input_value) { + switch (input_value) { + case VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR: + return "VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR"; + case VK_ACCELERATION_STRUCTURE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT: + return "VK_ACCELERATION_STRUCTURE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT"; + case VK_ACCELERATION_STRUCTURE_CREATE_MOTION_BIT_NV: + return "VK_ACCELERATION_STRUCTURE_CREATE_MOTION_BIT_NV"; + default: + return "Unhandled VkAccelerationStructureCreateFlagBitsKHR"; + } +} + +#ifdef __cplusplus +static inline std::string string_VkAccelerationStructureCreateFlagsKHR(VkAccelerationStructureCreateFlagsKHR input_value) { + std::string ret; + int index = 0; + while(input_value) { + if (input_value & 1) { + if( !ret.empty()) ret.append("|"); + ret.append(string_VkAccelerationStructureCreateFlagBitsKHR(static_cast(1U << index))); + } + ++index; + input_value >>= 1; + } + if (ret.empty()) ret.append("VkAccelerationStructureCreateFlagsKHR(0)"); + return ret; +} +#endif // __cplusplus +// clang-format on diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index 1c2d239db1..d7b237072b 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -40,6 +40,7 @@ #include #include #include +#include GFXRECON_BEGIN_NAMESPACE(gfxrecon) @@ -407,12 +408,6 @@ struct PhysicalDeviceErrorCategory : std::error_category { }; const PhysicalDeviceErrorCategory physical_device_error_category; -struct GeneralErrorCategory : std::error_category { - const char* name() const noexcept override { return "gfxrecon_test_general"; } - std::string message(int err) const override { return to_string(static_cast(err)); } -}; -const GeneralErrorCategory general_error_category; - struct QueueErrorCategory : std::error_category { const char* name() const noexcept override { return "gfxrecon_test_queue"; } std::string message(int err) const override { return to_string(static_cast(err)); } @@ -426,31 +421,13 @@ struct DeviceErrorCategory : std::error_category { const DeviceErrorCategory device_error_category; struct SwapchainErrorCategory : std::error_category { - const char* name() const noexcept override { return 
"vbk_swapchain"; } + const char* name() const noexcept override { return "gfxrecon_test_swapchain"; } std::string message(int err) const override { return to_string(static_cast(err)); } }; const SwapchainErrorCategory swapchain_error_category; GFXRECON_END_NAMESPACE(detail) -std::error_code make_error_code(InstanceError instance_error) { - return { static_cast(instance_error), detail::instance_error_category }; -} -std::error_code make_error_code(PhysicalDeviceError physical_device_error) { - return { static_cast(physical_device_error), detail::physical_device_error_category }; -} -std::error_code make_error_code(GeneralError general_error) { - return { static_cast(general_error), detail::general_error_category }; -} -std::error_code make_error_code(QueueError queue_error) { - return { static_cast(queue_error), detail::queue_error_category }; -} -std::error_code make_error_code(DeviceError device_error) { - return { static_cast(device_error), detail::device_error_category }; -} -std::error_code make_error_code(SwapchainError swapchain_error) { - return { static_cast(swapchain_error), detail::swapchain_error_category }; -} #define CASE_TO_STRING(CATEGORY, TYPE) \ case CATEGORY::TYPE: \ return #TYPE; @@ -480,16 +457,6 @@ const char* to_string(PhysicalDeviceError err) { return ""; } } -const char* to_string(GeneralError err) { - switch (err) { - case GeneralError::sdl: - return SDL_GetError(); - case GeneralError::unexpected: - return "unexpected error"; - default: - return ""; - } -} const char* to_string(QueueError err) { switch (err) { CASE_TO_STRING(QueueError, present_unavailable) @@ -523,17 +490,72 @@ const char* to_string(SwapchainError err) { } } -Result SystemInfo::get_system_info() { +std::exception to_exception(InstanceError error) { + return std::runtime_error(to_string(error)); +} +std::exception to_exception(InstanceError error, VkResult result) { + std::string message{}; + message.append(to_string(error)); + message.append(": "); + message.append(string_VkResult(result)); + return std::runtime_error(message); +} + +std::exception to_exception(PhysicalDeviceError error) { + return std::runtime_error(to_string(error)); +} +std::exception to_exception(PhysicalDeviceError error, VkResult result) { + std::string message{}; + message.append(to_string(error)); + message.append(": "); + message.append(string_VkResult(result)); + return std::runtime_error(message); +} + +std::exception to_exception(QueueError error) { + return std::runtime_error(to_string(error)); +} +std::exception to_exception(QueueError error, VkResult result) { + std::string message{}; + message.append(to_string(error)); + message.append(": "); + message.append(string_VkResult(result)); + return std::runtime_error(message); +} + +std::exception to_exception(DeviceError error) { + return std::runtime_error(to_string(error)); +} +std::exception to_exception(DeviceError error, VkResult result) { + std::string message{}; + message.append(to_string(error)); + message.append(": "); + message.append(string_VkResult(result)); + return std::runtime_error(message); +} + +std::exception to_exception(SwapchainError error) { + return std::runtime_error(to_string(error)); +} +std::exception to_exception(SwapchainError error, VkResult result) { + std::string message{}; + message.append(to_string(error)); + message.append(": "); + message.append(string_VkResult(result)); + return std::runtime_error(message); +} + +SystemInfo SystemInfo::get_system_info() { if (!detail::vulkan_functions().init_vulkan_funcs(nullptr)) { - return 
make_error_code(InstanceError::vulkan_unavailable); + throw to_exception(InstanceError::vulkan_unavailable); } return SystemInfo(); } -Result SystemInfo::get_system_info(PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr) { +SystemInfo SystemInfo::get_system_info(PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr) { // Using externally provided function pointers, assume the loader is available if (!detail::vulkan_functions().init_vulkan_funcs(fp_vkGetInstanceProcAddr)) { - return make_error_code(InstanceError::vulkan_unavailable); + throw to_exception(InstanceError::vulkan_unavailable); } return SystemInfo(); } @@ -610,11 +632,9 @@ InstanceBuilder::InstanceBuilder(PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcA } InstanceBuilder::InstanceBuilder() {} -Result InstanceBuilder::build() const { +Instance InstanceBuilder::build() const { - auto sys_info_ret = SystemInfo::get_system_info(info.fp_vkGetInstanceProcAddr); - if (!sys_info_ret) return sys_info_ret.error(); - auto system = sys_info_ret.value(); + auto system = SystemInfo::get_system_info(info.fp_vkGetInstanceProcAddr); uint32_t instance_version = VKB_VK_API_VERSION_1_0; @@ -626,16 +646,16 @@ Result InstanceBuilder::build() const { VkResult res = pfn_vkEnumerateInstanceVersion(&instance_version); // Should always return VK_SUCCESS if (res != VK_SUCCESS && info.required_api_version > 0) - return make_error_code(InstanceError::vulkan_version_unavailable); + throw to_exception(InstanceError::vulkan_version_unavailable); } if (pfn_vkEnumerateInstanceVersion == nullptr || instance_version < info.minimum_instance_version || (info.minimum_instance_version == 0 && instance_version < info.required_api_version)) { if (VK_VERSION_MINOR(info.required_api_version) == 2) - return make_error_code(InstanceError::vulkan_version_1_2_unavailable); + throw to_exception(InstanceError::vulkan_version_1_2_unavailable); else if (VK_VERSION_MINOR(info.required_api_version)) - return make_error_code(InstanceError::vulkan_version_1_1_unavailable); + throw to_exception(InstanceError::vulkan_version_1_1_unavailable); else - return make_error_code(InstanceError::vulkan_version_unavailable); + throw to_exception(InstanceError::vulkan_version_unavailable); } } @@ -701,11 +721,11 @@ Result InstanceBuilder::build() const { bool added_window_exts = check_add_window_ext("VK_EXT_metal_surface"); #endif if (!khr_surface_added || !added_window_exts) - return make_error_code(InstanceError::windowing_extensions_not_present); + throw to_exception(InstanceError::windowing_extensions_not_present); } bool all_extensions_supported = detail::check_extensions_supported(system.available_extensions, extensions); if (!all_extensions_supported) { - return make_error_code(InstanceError::requested_extensions_not_present); + throw to_exception(InstanceError::requested_extensions_not_present); } for (auto& layer : info.layers) @@ -716,7 +736,7 @@ Result InstanceBuilder::build() const { } bool all_layers_supported = detail::check_layers_supported(system.available_layers, layers); if (!all_layers_supported) { - return make_error_code(InstanceError::requested_layers_not_present); + throw to_exception(InstanceError::requested_layers_not_present); } std::vector pNext_chain; @@ -775,7 +795,7 @@ Result InstanceBuilder::build() const { Instance instance; VkResult res = detail::vulkan_functions().fp_vkCreateInstance(&instance_create_info, info.allocation_callbacks, &instance.instance); - if (res != VK_SUCCESS) return Result(InstanceError::failed_create_instance, res); + if (res != VK_SUCCESS) throw 
to_exception(InstanceError::failed_create_instance, res); detail::vulkan_functions().init_instance_funcs(instance.instance); @@ -787,9 +807,7 @@ Result InstanceBuilder::build() const { info.debug_user_data_pointer, &instance.debug_messenger, info.allocation_callbacks); - if (res != VK_SUCCESS) { - return Result(InstanceError::failed_create_debug_messenger, res); - } + if (res != VK_SUCCESS) throw to_exception(InstanceError::failed_create_debug_messenger, res); } instance.headless = info.headless_context; @@ -1079,19 +1097,18 @@ bool supports_features(const VkPhysicalDeviceFeatures& supported, return extension_supported.match_all(extension_requested); } // clang-format on -// Finds the first queue which supports the desired operations. Returns QUEUE_INDEX_MAX_VALUE if none is found -uint32_t get_first_queue_index(std::vector const& families, VkQueueFlags desired_flags) { +// Finds the first queue which supports the desired operations. +std::optional get_first_queue_index(std::vector const& families, VkQueueFlags desired_flags) { for (uint32_t i = 0; i < static_cast(families.size()); i++) { if ((families[i].queueFlags & desired_flags) == desired_flags) return i; } - return QUEUE_INDEX_MAX_VALUE; + return {}; } // Finds the queue which is separate from the graphics queue and has the desired flag and not the -// undesired flag, but will select it if no better options are available compute support. Returns -// QUEUE_INDEX_MAX_VALUE if none is found. -uint32_t get_separate_queue_index( +// undesired flag, but will select it if no better options are available compute support. +std::optional get_separate_queue_index( std::vector const& families, VkQueueFlags desired_flags, VkQueueFlags undesired_flags) { - uint32_t index = QUEUE_INDEX_MAX_VALUE; + std::optional index = {}; for (uint32_t i = 0; i < static_cast(families.size()); i++) { if ((families[i].queueFlags & desired_flags) == desired_flags && ((families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) == 0)) { if ((families[i].queueFlags & undesired_flags) == 0) { @@ -1104,29 +1121,29 @@ uint32_t get_separate_queue_index( return index; } -// finds the first queue which supports only the desired flag (not graphics or transfer). Returns QUEUE_INDEX_MAX_VALUE if none is found. -uint32_t get_dedicated_queue_index( +// finds the first queue which supports only the desired flag (not graphics or transfer). +std::optional get_dedicated_queue_index( std::vector const& families, VkQueueFlags desired_flags, VkQueueFlags undesired_flags) { for (uint32_t i = 0; i < static_cast(families.size()); i++) { if ((families[i].queueFlags & desired_flags) == desired_flags && (families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) == 0 && (families[i].queueFlags & undesired_flags) == 0) return i; } - return QUEUE_INDEX_MAX_VALUE; + return {}; } -// finds the first queue which supports presenting. returns QUEUE_INDEX_MAX_VALUE if none is found -uint32_t get_present_queue_index( +// finds the first queue which supports presenting. 
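[Illustrative aside, not part of the patch: the hunks above swap the old QUEUE_INDEX_MAX_VALUE sentinel for std::optional<uint32_t>. A minimal caller-side sketch follows, assuming detail::get_first_queue_index from test_app_base.cpp is visible at the call site; pick_graphics_family is a hypothetical name used only for illustration.]

    #include <optional>
    #include <stdexcept>
    #include <vector>
    #include <vulkan/vulkan_core.h>

    // Hypothetical caller: checks the optional instead of comparing against a sentinel value.
    uint32_t pick_graphics_family(const std::vector<VkQueueFamilyProperties>& queue_families)
    {
        std::optional<uint32_t> graphics =
            gfxrecon::test::detail::get_first_queue_index(queue_families, VK_QUEUE_GRAPHICS_BIT);
        if (!graphics.has_value())
        {
            // An empty optional now signals "no matching family" where the old code returned QUEUE_INDEX_MAX_VALUE.
            throw std::runtime_error("no graphics-capable queue family found");
        }
        return *graphics;
    }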
+std::optional get_present_queue_index( VkPhysicalDevice const phys_device, VkSurfaceKHR const surface, std::vector const& families) { for (uint32_t i = 0; i < static_cast(families.size()); i++) { VkBool32 presentSupport = false; if (surface != VK_NULL_HANDLE) { VkResult res = detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfaceSupportKHR(phys_device, i, surface, &presentSupport); - if (res != VK_SUCCESS) return QUEUE_INDEX_MAX_VALUE; // TODO: determine if this should fail another way + if (res != VK_SUCCESS) return {}; // TODO: determine if this should fail another way } if (presentSupport == VK_TRUE) return i; } - return QUEUE_INDEX_MAX_VALUE; + return {}; } GFXRECON_END_NAMESPACE(detail) @@ -1184,24 +1201,32 @@ PhysicalDevice::Suitable PhysicalDeviceSelector::is_device_suitable(PhysicalDevi if (criteria.required_version > pd.properties.apiVersion) return PhysicalDevice::Suitable::no; if (criteria.desired_version > pd.properties.apiVersion) suitable = PhysicalDevice::Suitable::partial; - bool dedicated_compute = detail::get_dedicated_queue_index(pd.queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT) != - detail::QUEUE_INDEX_MAX_VALUE; - bool dedicated_transfer = detail::get_dedicated_queue_index(pd.queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT) != - detail::QUEUE_INDEX_MAX_VALUE; - bool separate_compute = detail::get_separate_queue_index(pd.queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT) != - detail::QUEUE_INDEX_MAX_VALUE; - bool separate_transfer = detail::get_separate_queue_index(pd.queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT) != - detail::QUEUE_INDEX_MAX_VALUE; - - bool present_queue = detail::get_present_queue_index(pd.physical_device, instance_info.surface, pd.queue_families) != - detail::QUEUE_INDEX_MAX_VALUE; - - if (criteria.require_dedicated_compute_queue && !dedicated_compute) return PhysicalDevice::Suitable::no; - if (criteria.require_dedicated_transfer_queue && !dedicated_transfer) return PhysicalDevice::Suitable::no; - if (criteria.require_separate_compute_queue && !separate_compute) return PhysicalDevice::Suitable::no; - if (criteria.require_separate_transfer_queue && !separate_transfer) return PhysicalDevice::Suitable::no; + bool dedicated_compute = detail::get_dedicated_queue_index(pd.queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT).has_value(); + bool dedicated_transfer = detail::get_dedicated_queue_index(pd.queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT).has_value(); + bool separate_compute = detail::get_separate_queue_index(pd.queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT).has_value(); + bool separate_transfer = detail::get_separate_queue_index(pd.queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT).has_value(); + + bool present_queue = detail::get_present_queue_index(pd.physical_device, instance_info.surface, pd.queue_families).has_value(); + + if (criteria.require_dedicated_compute_queue && !dedicated_compute) + { + return PhysicalDevice::Suitable::no; + } + if (criteria.require_dedicated_transfer_queue && !dedicated_transfer) + { + return PhysicalDevice::Suitable::no; + } + if (criteria.require_separate_compute_queue && !separate_compute) + { + return PhysicalDevice::Suitable::no; + } + if (criteria.require_separate_transfer_queue && !separate_transfer) { + return PhysicalDevice::Suitable::no; + } if (criteria.require_present && !present_queue && !criteria.defer_surface_initialization) + { return PhysicalDevice::Suitable::no; + } auto required_extensions_supported 
= detail::check_device_extension_support(pd.available_extensions, criteria.required_extensions); @@ -1264,7 +1289,7 @@ PhysicalDeviceSelector::PhysicalDeviceSelector(Instance const& instance, VkSurfa criteria.desired_version = instance.api_version; } -Result> PhysicalDeviceSelector::select_impl(DeviceSelectionMode selection) const { +std::vector PhysicalDeviceSelector::select_impl(DeviceSelectionMode selection) const { #if !defined(NDEBUG) // Validation for (const auto& node : criteria.extended_features_chain.nodes) { @@ -1279,7 +1304,7 @@ Result> PhysicalDeviceSelector::select_impl(DeviceSe if (criteria.require_present && !criteria.defer_surface_initialization) { if (instance_info.surface == VK_NULL_HANDLE) - return Result>{ PhysicalDeviceError::no_surface_provided }; + throw to_exception( PhysicalDeviceError::no_surface_provided); } // Get the VkPhysicalDevice handles on the system @@ -1288,10 +1313,10 @@ Result> PhysicalDeviceSelector::select_impl(DeviceSe auto vk_physical_devices_ret = detail::get_vector( vk_physical_devices, detail::vulkan_functions().fp_vkEnumeratePhysicalDevices, instance_info.instance); if (vk_physical_devices_ret != VK_SUCCESS) { - return Result>{ PhysicalDeviceError::failed_enumerate_physical_devices, vk_physical_devices_ret }; + throw to_exception(PhysicalDeviceError::failed_enumerate_physical_devices, vk_physical_devices_ret); } if (vk_physical_devices.size() == 0) { - return Result>{ PhysicalDeviceError::no_physical_devices_found }; + throw to_exception(PhysicalDeviceError::no_physical_devices_found); } auto fill_out_phys_dev_with_criteria = [&](PhysicalDevice& phys_dev) { @@ -1350,35 +1375,32 @@ Result> PhysicalDeviceSelector::select_impl(DeviceSe return physical_devices; } -Result PhysicalDeviceSelector::select(DeviceSelectionMode selection) const { +PhysicalDevice PhysicalDeviceSelector::select(DeviceSelectionMode selection) const { auto const selected_devices = select_impl(selection); - if (!selected_devices) return Result{ selected_devices.error() }; - if (selected_devices.value().size() == 0) { - return Result{ PhysicalDeviceError::no_suitable_device }; + if (selected_devices.size() == 0) { + throw to_exception(PhysicalDeviceError::no_suitable_device); } - return selected_devices.value().at(0); + return selected_devices.at(0); } // Return all devices which are considered suitable - intended for applications which want to let the user pick the physical device -Result> PhysicalDeviceSelector::select_devices(DeviceSelectionMode selection) const { +std::vector PhysicalDeviceSelector::select_devices(DeviceSelectionMode selection) const { auto const selected_devices = select_impl(selection); - if (!selected_devices) return Result>{ selected_devices.error() }; - if (selected_devices.value().size() == 0) { - return Result>{ PhysicalDeviceError::no_suitable_device }; + if (selected_devices.size() == 0) { + throw to_exception(PhysicalDeviceError::no_suitable_device); } - return selected_devices.value(); + return selected_devices; } -Result> PhysicalDeviceSelector::select_device_names(DeviceSelectionMode selection) const { +std::vector PhysicalDeviceSelector::select_device_names(DeviceSelectionMode selection) const { auto const selected_devices = select_impl(selection); - if (!selected_devices) return Result>{ selected_devices.error() }; - if (selected_devices.value().size() == 0) { - return Result>{ PhysicalDeviceError::no_suitable_device }; + if (selected_devices.size() == 0) { + throw to_exception(PhysicalDeviceError::no_suitable_device); } std::vector names; - 
for (const auto& pd : selected_devices.value()) { + for (const auto& pd : selected_devices) { names.push_back(pd.name); } return names; @@ -1507,16 +1529,16 @@ PhysicalDeviceSelector& PhysicalDeviceSelector::select_first_device_unconditiona // PhysicalDevice bool PhysicalDevice::has_dedicated_compute_queue() const { - return detail::get_dedicated_queue_index(queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT) != detail::QUEUE_INDEX_MAX_VALUE; + return detail::get_dedicated_queue_index(queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT).has_value(); } bool PhysicalDevice::has_separate_compute_queue() const { - return detail::get_separate_queue_index(queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT) != detail::QUEUE_INDEX_MAX_VALUE; + return detail::get_separate_queue_index(queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT).has_value(); } bool PhysicalDevice::has_dedicated_transfer_queue() const { - return detail::get_dedicated_queue_index(queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT) != detail::QUEUE_INDEX_MAX_VALUE; + return detail::get_dedicated_queue_index(queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT).has_value(); } bool PhysicalDevice::has_separate_transfer_queue() const { - return detail::get_separate_queue_index(queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT) != detail::QUEUE_INDEX_MAX_VALUE; + return detail::get_separate_queue_index(queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT).has_value(); } std::vector PhysicalDevice::get_queue_families() const { return queue_families; } std::vector PhysicalDevice::get_extensions() const { return extensions_to_enable; } @@ -1600,59 +1622,46 @@ PhysicalDevice::operator VkPhysicalDevice() const { return this->physical_device // ---- Queues ---- // -Result Device::get_queue_index(QueueType type) const { - uint32_t index = detail::QUEUE_INDEX_MAX_VALUE; +std::optional Device::get_queue_index(QueueType type) const { switch (type) { case QueueType::present: - index = detail::get_present_queue_index(physical_device.physical_device, surface, queue_families); - if (index == detail::QUEUE_INDEX_MAX_VALUE) return Result{ QueueError::present_unavailable }; + return detail::get_present_queue_index(physical_device.physical_device, surface, queue_families); break; case QueueType::graphics: - index = detail::get_first_queue_index(queue_families, VK_QUEUE_GRAPHICS_BIT); - if (index == detail::QUEUE_INDEX_MAX_VALUE) return Result{ QueueError::graphics_unavailable }; - break; + return detail::get_first_queue_index(queue_families, VK_QUEUE_GRAPHICS_BIT); case QueueType::compute: - index = detail::get_separate_queue_index(queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT); - if (index == detail::QUEUE_INDEX_MAX_VALUE) return Result{ QueueError::compute_unavailable }; - break; + return detail::get_separate_queue_index(queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT); case QueueType::transfer: - index = detail::get_separate_queue_index(queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT); - if (index == detail::QUEUE_INDEX_MAX_VALUE) return Result{ QueueError::transfer_unavailable }; - break; + return detail::get_separate_queue_index(queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT); default: - return Result{ QueueError::invalid_queue_family_index }; + return {}; } - return index; } -Result Device::get_dedicated_queue_index(QueueType type) const { - uint32_t index = detail::QUEUE_INDEX_MAX_VALUE; + +std::optional 
Device::get_dedicated_queue_index(QueueType type) const { switch (type) { case QueueType::compute: - index = detail::get_dedicated_queue_index(queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT); - if (index == detail::QUEUE_INDEX_MAX_VALUE) return Result{ QueueError::compute_unavailable }; - break; + return detail::get_dedicated_queue_index(queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT); case QueueType::transfer: - index = detail::get_dedicated_queue_index(queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT); - if (index == detail::QUEUE_INDEX_MAX_VALUE) return Result{ QueueError::transfer_unavailable }; - break; + return detail::get_dedicated_queue_index(queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT); default: - return Result{ QueueError::invalid_queue_family_index }; + return {}; } - return index; } -Result Device::get_queue(QueueType type) const { +std::optional Device::get_queue(QueueType type) const { auto index = get_queue_index(type); - if (!index.has_value()) return { index.error() }; + if (!index.has_value()) return {}; VkQueue out_queue; - internal_table.fp_vkGetDeviceQueue(device, index.value(), 0, &out_queue); + internal_table.fp_vkGetDeviceQueue(device, *index, 0, &out_queue); return out_queue; } -Result Device::get_dedicated_queue(QueueType type) const { + +std::optional Device::get_dedicated_queue(QueueType type) const { auto index = get_dedicated_queue_index(type); - if (!index.has_value()) return { index.error() }; + if (!index.has_value()) return {}; VkQueue out_queue; - internal_table.fp_vkGetDeviceQueue(device, index.value(), 0, &out_queue); + internal_table.fp_vkGetDeviceQueue(device, *index, 0, &out_queue); return out_queue; } @@ -1673,8 +1682,7 @@ void destroy_device(Device const& device) { DeviceBuilder::DeviceBuilder(PhysicalDevice phys_device) { physical_device = std::move(phys_device); } -Result DeviceBuilder::build() const { - +Device DeviceBuilder::build() const { std::vector queue_descriptions; queue_descriptions.insert(queue_descriptions.end(), info.queue_descriptions.begin(), info.queue_descriptions.end()); @@ -1713,7 +1721,7 @@ Result DeviceBuilder::build() const { } if (user_defined_phys_dev_features_2 && !physical_device.extended_features_chain.nodes.empty()) { - return { DeviceError::VkPhysicalDeviceFeatures2_in_pNext_chain_while_using_add_required_extension_features }; + throw to_exception(DeviceError::VkPhysicalDeviceFeatures2_in_pNext_chain_while_using_add_required_extension_features); } // These objects must be alive during the call to vkCreateDevice @@ -1756,7 +1764,7 @@ Result DeviceBuilder::build() const { VkResult res = detail::vulkan_functions().fp_vkCreateDevice( physical_device.physical_device, &device_create_info, info.allocation_callbacks, &device.device); if (res != VK_SUCCESS) { - return { DeviceError::failed_create_device, res }; + throw to_exception(DeviceError::failed_create_device, res); } device.physical_device = physical_device; @@ -1769,10 +1777,12 @@ Result DeviceBuilder::build() const { device.instance_version = physical_device.instance_version; return device; } + DeviceBuilder& DeviceBuilder::custom_queue_setup(std::vector queue_descriptions) { info.queue_descriptions = std::move(queue_descriptions); return *this; } + DeviceBuilder& DeviceBuilder::set_allocation_callbacks(VkAllocationCallbacks* callbacks) { info.allocation_callbacks = callbacks; return *this; @@ -1796,33 +1806,38 @@ enum class SurfaceSupportError { no_suitable_desired_format }; -struct SurfaceSupportErrorCategory : 
std::error_category { - const char* name() const noexcept override { return "vbk_surface_support"; } - std::string message(int err) const override { - switch (static_cast(err)) { - CASE_TO_STRING(SurfaceSupportError, surface_handle_null) - CASE_TO_STRING(SurfaceSupportError, failed_get_surface_capabilities) - CASE_TO_STRING(SurfaceSupportError, failed_enumerate_surface_formats) - CASE_TO_STRING(SurfaceSupportError, failed_enumerate_present_modes) - CASE_TO_STRING(SurfaceSupportError, no_suitable_desired_format) - default: - return ""; - } +const char* to_string(SurfaceSupportError err) +{ + switch (err) + { + CASE_TO_STRING(SurfaceSupportError, surface_handle_null) + CASE_TO_STRING(SurfaceSupportError, failed_get_surface_capabilities) + CASE_TO_STRING(SurfaceSupportError, failed_enumerate_surface_formats) + CASE_TO_STRING(SurfaceSupportError, failed_enumerate_present_modes) + CASE_TO_STRING(SurfaceSupportError, no_suitable_desired_format) + default: + return ""; } -}; -const SurfaceSupportErrorCategory surface_support_error_category; +} -std::error_code make_error_code(SurfaceSupportError surface_support_error) { - return { static_cast(surface_support_error), detail::surface_support_error_category }; +std::exception to_exception(SurfaceSupportError error) { + return std::runtime_error(to_string(error)); +} +std::exception to_exception(SurfaceSupportError error, VkResult result) { + std::string message{}; + message.append(to_string(error)); + message.append(": "); + message.append(string_VkResult(result)); + return std::runtime_error(message); } -Result query_surface_support_details(VkPhysicalDevice phys_device, VkSurfaceKHR surface) { - if (surface == VK_NULL_HANDLE) return make_error_code(SurfaceSupportError::surface_handle_null); +SurfaceSupportDetails query_surface_support_details(VkPhysicalDevice phys_device, VkSurfaceKHR surface) { + if (surface == VK_NULL_HANDLE) throw to_exception(SurfaceSupportError::surface_handle_null); VkSurfaceCapabilitiesKHR capabilities; VkResult res = detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR(phys_device, surface, &capabilities); if (res != VK_SUCCESS) { - return { make_error_code(SurfaceSupportError::failed_get_surface_capabilities), res }; + throw to_exception(SurfaceSupportError::failed_get_surface_capabilities, res); } std::vector formats; @@ -1831,16 +1846,16 @@ Result query_surface_support_details(VkPhysicalDevice phy auto formats_ret = detail::get_vector( formats, detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfaceFormatsKHR, phys_device, surface); if (formats_ret != VK_SUCCESS) - return { make_error_code(SurfaceSupportError::failed_enumerate_surface_formats), formats_ret }; + throw to_exception(SurfaceSupportError::failed_enumerate_surface_formats, formats_ret); auto present_modes_ret = detail::get_vector( present_modes, detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfacePresentModesKHR, phys_device, surface); if (present_modes_ret != VK_SUCCESS) - return { make_error_code(SurfaceSupportError::failed_enumerate_present_modes), present_modes_ret }; + throw to_exception(SurfaceSupportError::failed_enumerate_present_modes, present_modes_ret); return SurfaceSupportDetails{ capabilities, formats, present_modes }; } -Result find_desired_surface_format( +std::optional find_desired_surface_format( std::vector const& available_formats, std::vector const& desired_formats) { for (auto const& desired_format : desired_formats) { for (auto const& available_format : available_formats) { @@ -1851,17 +1866,13 @@ Result 
find_desired_surface_format( } } - // if no desired format is available, we report that no format is suitable to the user request - return { make_error_code(SurfaceSupportError::no_suitable_desired_format) }; + return {}; } VkSurfaceFormatKHR find_best_surface_format( std::vector const& available_formats, std::vector const& desired_formats) { auto surface_format_ret = detail::find_desired_surface_format(available_formats, desired_formats); - if (surface_format_ret.has_value()) return surface_format_ret.value(); - - // use the first available format as a fallback if any desired formats aren't found - return available_formats[0]; + return surface_format_ret.value_or(available_formats[0]); } VkPresentModeKHR find_present_mode(std::vector const& available_resent_modes, @@ -1910,10 +1921,11 @@ SwapchainBuilder::SwapchainBuilder(Device const& device) { auto present = device.get_queue_index(QueueType::present); auto graphics = device.get_queue_index(QueueType::graphics); assert(graphics.has_value() && present.has_value() && "Graphics and Present queue indexes must be valid"); - info.graphics_queue_index = present.value(); - info.present_queue_index = graphics.value(); + info.graphics_queue_index = *present; + info.present_queue_index = *graphics; info.allocation_callbacks = device.allocation_callbacks; } + SwapchainBuilder::SwapchainBuilder(Device const& device, VkSurfaceKHR const surface) { info.physical_device = device.physical_device.physical_device; info.device = device.device; @@ -1924,32 +1936,32 @@ SwapchainBuilder::SwapchainBuilder(Device const& device, VkSurfaceKHR const surf auto present = temp_device.get_queue_index(QueueType::present); auto graphics = temp_device.get_queue_index(QueueType::graphics); assert(graphics.has_value() && present.has_value() && "Graphics and Present queue indexes must be valid"); - info.graphics_queue_index = graphics.value(); - info.present_queue_index = present.value(); + info.graphics_queue_index = *graphics; + info.present_queue_index = *present; info.allocation_callbacks = device.allocation_callbacks; } SwapchainBuilder::SwapchainBuilder(VkPhysicalDevice const physical_device, VkDevice const device, VkSurfaceKHR const surface, - uint32_t graphics_queue_index, - uint32_t present_queue_index) { + std::optional graphics_queue_index, + std::optional present_queue_index) { info.physical_device = physical_device; info.device = device; info.surface = surface; info.graphics_queue_index = graphics_queue_index; info.present_queue_index = present_queue_index; - if (graphics_queue_index == detail::QUEUE_INDEX_MAX_VALUE || present_queue_index == detail::QUEUE_INDEX_MAX_VALUE) { + if (!graphics_queue_index.has_value() || !present_queue_index.has_value()) { auto queue_families = detail::get_vector_noerror( detail::vulkan_functions().fp_vkGetPhysicalDeviceQueueFamilyProperties, physical_device); - if (graphics_queue_index == detail::QUEUE_INDEX_MAX_VALUE) + if (!graphics_queue_index.has_value()) info.graphics_queue_index = detail::get_first_queue_index(queue_families, VK_QUEUE_GRAPHICS_BIT); - if (present_queue_index == detail::QUEUE_INDEX_MAX_VALUE) + if (!present_queue_index.has_value()) info.present_queue_index = detail::get_present_queue_index(physical_device, surface, queue_families); } } -Result SwapchainBuilder::build() const { +Swapchain SwapchainBuilder::build() const { if (info.surface == VK_NULL_HANDLE) { - return Error{ SwapchainError::surface_handle_not_provided }; + throw to_exception(SwapchainError::surface_handle_not_provided); } auto desired_formats = 
info.desired_formats; @@ -1957,15 +1969,12 @@ Result SwapchainBuilder::build() const { auto desired_present_modes = info.desired_present_modes; if (desired_present_modes.size() == 0) add_desired_present_modes(desired_present_modes); - auto surface_support_ret = detail::query_surface_support_details(info.physical_device, info.surface); - if (!surface_support_ret.has_value()) - return Error{ SwapchainError::failed_query_surface_support_details, surface_support_ret.vk_result() }; - auto surface_support = surface_support_ret.value(); + auto surface_support = detail::query_surface_support_details(info.physical_device, info.surface); uint32_t image_count = info.min_image_count; if (info.required_min_image_count >= 1) { if (info.required_min_image_count < surface_support.capabilities.minImageCount) - return make_error_code(SwapchainError::required_min_image_count_too_low); + throw to_exception(SwapchainError::required_min_image_count_too_low); image_count = info.required_min_image_count; } else if (info.min_image_count == 0) { @@ -1989,8 +1998,7 @@ Result SwapchainBuilder::build() const { image_array_layers = surface_support.capabilities.maxImageArrayLayers; if (info.array_layer_count == 0) image_array_layers = 1; - uint32_t queue_family_indices[] = { info.graphics_queue_index, info.present_queue_index }; - + uint32_t queue_family_indices[] = { *info.graphics_queue_index, *info.present_queue_index }; VkPresentModeKHR present_mode = detail::find_present_mode(surface_support.present_modes, desired_present_modes); @@ -2002,7 +2010,7 @@ Result SwapchainBuilder::build() const { if (is_unextended_present_mode(present_mode) && (info.image_usage_flags & surface_support.capabilities.supportedUsageFlags) != info.image_usage_flags) { - return Error{ SwapchainError::required_usage_not_supported }; + throw to_exception(SwapchainError::required_usage_not_supported); } VkSurfaceTransformFlagBitsKHR pre_transform = info.pre_transform; @@ -2045,7 +2053,7 @@ Result SwapchainBuilder::build() const { auto res = swapchain_create_proc(info.device, &swapchain_create_info, info.allocation_callbacks, &swapchain.swapchain); if (res != VK_SUCCESS) { - return Error{ SwapchainError::failed_create_swapchain, res }; + throw to_exception(SwapchainError::failed_create_swapchain, res); } swapchain.device = info.device; swapchain.image_format = surface_format.format; @@ -2059,31 +2067,26 @@ Result SwapchainBuilder::build() const { detail::vulkan_functions().get_device_proc_addr( info.device, swapchain.internal_table.fp_vkDestroySwapchainKHR, "vkDestroySwapchainKHR"); auto images = swapchain.get_images(); - if (!images) { - return Error{ SwapchainError::failed_get_swapchain_images }; - } swapchain.requested_min_image_count = image_count; swapchain.present_mode = present_mode; - swapchain.image_count = static_cast(images.value().size()); + swapchain.image_count = static_cast(images.size()); swapchain.instance_version = info.instance_version; swapchain.allocation_callbacks = info.allocation_callbacks; return swapchain; } -Result> Swapchain::get_images() { +std::vector Swapchain::get_images() { std::vector swapchain_images; auto swapchain_images_ret = detail::get_vector(swapchain_images, internal_table.fp_vkGetSwapchainImagesKHR, device, swapchain); if (swapchain_images_ret != VK_SUCCESS) { - return Error{ SwapchainError::failed_get_swapchain_images, swapchain_images_ret }; + throw to_exception(SwapchainError::failed_get_swapchain_images, swapchain_images_ret); } return swapchain_images; } -Result> Swapchain::get_image_views() { 
return get_image_views(nullptr); } -Result> Swapchain::get_image_views(const void* pNext) { - const auto swapchain_images_ret = get_images(); - if (!swapchain_images_ret) return swapchain_images_ret.error(); - const auto& swapchain_images = swapchain_images_ret.value(); +std::vector Swapchain::get_image_views() { return get_image_views(nullptr); } +std::vector Swapchain::get_image_views(const void* pNext) { + const auto swapchain_images = get_images(); bool already_contains_image_view_usage = false; while (pNext) { @@ -2121,7 +2124,7 @@ Result> Swapchain::get_image_views(const void* pNext) { createInfo.subresourceRange.baseArrayLayer = 0; createInfo.subresourceRange.layerCount = 1; VkResult res = internal_table.fp_vkCreateImageView(device, &createInfo, allocation_callbacks, &views[i]); - if (res != VK_SUCCESS) return Error{ SwapchainError::failed_create_swapchain_image_views, res }; + if (res != VK_SUCCESS) throw to_exception(SwapchainError::failed_create_swapchain_image_views, res); } return views; } @@ -2225,15 +2228,15 @@ void SwapchainBuilder::add_desired_present_modes(std::vector& modes.push_back(VK_PRESENT_MODE_FIFO_KHR); } -Result create_window_sdl(const char* window_name, bool resizable, int width, int height) { - if (!SDL_Init(SDL_INIT_VIDEO)) return Result{GeneralError::sdl}; +SDL_Window* create_window_sdl(const char* window_name, bool resizable, int width, int height) { + if (!SDL_Init(SDL_INIT_VIDEO)) throw sdl_exception(); SDL_WindowFlags flags = 0; flags |= SDL_WINDOW_VULKAN; if (resizable) flags |= SDL_WINDOW_RESIZABLE; auto window = SDL_CreateWindow(window_name, width, height, flags); - if (window == nullptr) return Result{GeneralError::sdl}; + if (window == nullptr) throw sdl_exception(); return window; } @@ -2243,27 +2246,23 @@ void destroy_window_sdl(SDL_Window* window) { SDL_Quit(); } -Result create_surface_sdl(VkInstance instance, SDL_Window * window, VkAllocationCallbacks* allocator) { +VkSurfaceKHR create_surface_sdl(VkInstance instance, SDL_Window * window, VkAllocationCallbacks* allocator) { VkSurfaceKHR surface = VK_NULL_HANDLE; if (!SDL_Vulkan_CreateSurface(window, instance, allocator, &surface)) { surface = VK_NULL_HANDLE; - return Result{GeneralError::sdl}; + throw sdl_exception(); } return surface; } -VoidResult create_swapchain(Device const& device, Swapchain& swapchain) { +void create_swapchain(Device const& device, Swapchain& swapchain) { SwapchainBuilder swapchain_builder{ device }; - auto swap_ret = swapchain_builder.set_old_swapchain(swapchain).build(); - if (!swap_ret) return swap_ret.error(); - + auto new_swapchain = swapchain_builder.set_old_swapchain(swapchain).build(); destroy_swapchain(swapchain); - swapchain = swap_ret.value(); - - return SUCCESS; + swapchain = new_swapchain; } -Result create_command_pool( +VkCommandPool create_command_pool( DispatchTable const& disp, uint32_t queue_family_index ) { @@ -2273,13 +2272,11 @@ Result create_command_pool( VkCommandPool command_pool; auto result = disp.createCommandPool(&pool_info, nullptr, &command_pool); - if (result != VK_SUCCESS) { - return Result{GeneralError::unexpected, result}; - } + VERIFY_VK_RESULT("failed to create command pool", result); return command_pool; } -Result create_sync_objects(Swapchain const& swapchain, DispatchTable const& disp, const int max_frames_in_flight) { +Sync create_sync_objects(Swapchain const& swapchain, DispatchTable const& disp, const int max_frames_in_flight) { Sync sync; sync.available_semaphores.resize(max_frames_in_flight); @@ -2295,11 +2292,13 @@ Result 
create_sync_objects(Swapchain const& swapchain, DispatchTable const fence_info.flags = VK_FENCE_CREATE_SIGNALED_BIT; for (size_t i = 0; i < max_frames_in_flight; i++) { - if (disp.createSemaphore(&semaphore_info, nullptr, &sync.available_semaphores[i]) != VK_SUCCESS || - disp.createSemaphore(&semaphore_info, nullptr, &sync.finished_semaphore[i]) != VK_SUCCESS || - disp.createFence(&fence_info, nullptr, &sync.in_flight_fences[i]) != VK_SUCCESS) { - return gfxrecon::test::Result{gfxrecon::test::GeneralError::unexpected}; - } + VkResult result; + result = disp.createSemaphore(&semaphore_info, nullptr, &sync.available_semaphores[i]); + VERIFY_VK_RESULT("failed to create available semaphore", result); + result = disp.createSemaphore(&semaphore_info, nullptr, &sync.finished_semaphore[i]); + VERIFY_VK_RESULT("failed to create finished semaphore", result); + result = disp.createFence(&fence_info, nullptr, &sync.in_flight_fences[i]); + VERIFY_VK_RESULT("failed to create in flight fence", result); } return sync; } @@ -2322,7 +2321,7 @@ std::vector readFile(const std::string& filename) { return buffer; } -Result createShaderModule(DispatchTable const& disp, const std::vector& code) { +VkShaderModule createShaderModule(DispatchTable const& disp, const std::vector& code) { VkShaderModuleCreateInfo create_info = {}; create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; create_info.codeSize = code.size(); @@ -2330,18 +2329,28 @@ Result createShaderModule(DispatchTable const& disp, const std:: VkShaderModule shaderModule; auto result = disp.createShaderModule(&create_info, nullptr, &shaderModule); - if (result != VK_SUCCESS) { - return Result{GeneralError::unexpected, result}; - } + VERIFY_VK_RESULT("failed to create shader module", result); return shaderModule; } -Result readShaderFromFile(DispatchTable const& disp, const std::string& filename) { +VkShaderModule readShaderFromFile(DispatchTable const& disp, const std::string& filename) { std::vector code = readFile(filename); return createShaderModule(disp, code); } +std::exception vulkan_exception(const char* message, VkResult result) { + std::string error_message; + error_message.append(message); + error_message.append(": "); + error_message.append(string_VkResult(result)); + return std::runtime_error(error_message); +} + +std::exception sdl_exception() { + return std::runtime_error(SDL_GetError()); +} + GFXRECON_END_NAMESPACE(test) GFXRECON_END_NAMESPACE(gfxrecon) diff --git a/test/test_apps/common/test_app_base.h b/test/test_apps/common/test_app_base.h index c93ef10bed..b743a2f50e 100644 --- a/test/test_apps/common/test_app_base.h +++ b/test/test_apps/common/test_app_base.h @@ -24,6 +24,8 @@ #ifndef GFXRECON_TEST_APP_BASE_H #define GFXRECON_TEST_APP_BASE_H +#include +#include #include #include #include @@ -66,111 +68,8 @@ GFXRECON_BEGIN_NAMESPACE(gfxrecon) GFXRECON_BEGIN_NAMESPACE(test) -struct Error { - std::error_code type; - VkResult vk_result = VK_SUCCESS; // optional error value if a vulkan call failed -}; - -template class Result { - public: - Result(const T& value) noexcept : m_value{ value }, m_init{ true } {} - Result(T&& value) noexcept : m_value{ std::move(value) }, m_init{ true } {} - - Result(Error error) noexcept : m_error{ error }, m_init{ false } {} - - Result(std::error_code error_code, VkResult result = VK_SUCCESS) noexcept - : m_error{ error_code, result }, m_init{ false } {} - - ~Result() noexcept { destroy(); } - Result(Result const& expected) noexcept : m_init(expected.m_init) { - if (m_init) - new (&m_value) T{ 
expected.m_value }; - else - m_error = expected.m_error; - } - Result& operator=(Result const& result) noexcept { - m_init = result.m_init; - if (m_init) - new (&m_value) T{ result.m_value }; - else - m_error = result.m_error; - return *this; - } - Result(Result&& expected) noexcept : m_init(expected.m_init) { - if (m_init) - new (&m_value) T{ std::move(expected.m_value) }; - else - m_error = std::move(expected.m_error); - expected.destroy(); - } - Result& operator=(Result&& result) noexcept { - m_init = result.m_init; - if (m_init) - new (&m_value) T{ std::move(result.m_value) }; - else - m_error = std::move(result.m_error); - return *this; - } - Result& operator=(const T& expect) noexcept { - destroy(); - m_init = true; - new (&m_value) T{ expect }; - return *this; - } - Result& operator=(T&& expect) noexcept { - destroy(); - m_init = true; - new (&m_value) T{ std::move(expect) }; - return *this; - } - Result& operator=(const Error& error) noexcept { - destroy(); - m_init = false; - m_error = error; - return *this; - } - Result& operator=(Error&& error) noexcept { - destroy(); - m_init = false; - m_error = error; - return *this; - } - // clang-format off - const T* operator-> () const noexcept { assert (m_init); return &m_value; } - T* operator-> () noexcept { assert (m_init); return &m_value; } - const T& operator* () const& noexcept { assert (m_init); return m_value; } - T& operator* () & noexcept { assert (m_init); return m_value; } - T operator* () && noexcept { assert (m_init); return std::move (m_value); } - const T& value () const& noexcept { assert (m_init); return m_value; } - T& value () & noexcept { assert (m_init); return m_value; } - T value () && noexcept { assert (m_init); return std::move (m_value); } - - // std::error_code associated with the error - std::error_code error() const { assert (!m_init); return m_error.type; } - // optional VkResult that could of been produced due to the error - VkResult vk_result() const { assert (!m_init); return m_error.vk_result; } - // Returns the struct that holds the std::error_code and VkResult - Error full_error() const { assert (!m_init); return m_error; } - // clang-format on - - // check if the result has an error that matches a specific error case - template bool matches_error(E error_enum_value) const { - return !m_init && static_cast(m_error.type.value()) == error_enum_value; - } - - bool has_value() const { return m_init; } - explicit operator bool() const { return m_init; } - - private: - void destroy() { - if (m_init) m_value.~T(); - } - union { - T m_value; - Error m_error; - }; - bool m_init; -}; +std::exception vulkan_exception(const char* message, VkResult result); +std::exception sdl_exception(); GFXRECON_BEGIN_NAMESPACE(detail) struct GenericFeaturesPNextNode { @@ -229,10 +128,6 @@ enum class InstanceError { requested_extensions_not_present, windowing_extensions_not_present, }; -enum class GeneralError { - sdl, - unexpected, -}; enum class PhysicalDeviceError { no_surface_provided, failed_enumerate_physical_devices, @@ -261,23 +156,27 @@ enum class SwapchainError { required_usage_not_supported }; -std::error_code make_error_code(InstanceError instance_error); -std::error_code make_error_code(PhysicalDeviceError physical_device_error); -std::error_code make_error_code(GeneralError general_error); -std::error_code make_error_code(QueueError queue_error); -std::error_code make_error_code(DeviceError device_error); -std::error_code make_error_code(SwapchainError swapchain_error); - const char* 
to_string_message_severity(VkDebugUtilsMessageSeverityFlagBitsEXT s); const char* to_string_message_type(VkDebugUtilsMessageTypeFlagsEXT s); const char* to_string(InstanceError err); const char* to_string(PhysicalDeviceError err); -const char* to_string(GeneralError err); const char* to_string(QueueError err); const char* to_string(DeviceError err); const char* to_string(SwapchainError err); +std::exception to_exception(InstanceError err); +std::exception to_exception(PhysicalDeviceError err); +std::exception to_exception(QueueError err); +std::exception to_exception(DeviceError err); +std::exception to_exception(SwapchainError err); + +std::exception to_exception(InstanceError err, VkResult result); +std::exception to_exception(PhysicalDeviceError err, VkResult result); +std::exception to_exception(QueueError err, VkResult result); +std::exception to_exception(DeviceError err, VkResult result); +std::exception to_exception(SwapchainError err, VkResult result); + // Gathers useful information about the available vulkan capabilities, like layers and instance // extensions. Use this for enabling features conditionally, ie if you would like an extension but // can use a fallback if it isn't supported but need to know if support is available first. @@ -287,8 +186,8 @@ struct SystemInfo { public: // Use get_system_info to create a SystemInfo struct. This is because loading vulkan could fail. - static Result<SystemInfo> get_system_info(); - static Result<SystemInfo> get_system_info(PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr); + static SystemInfo get_system_info(); + static SystemInfo get_system_info(PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr); // Returns true if a layer is available bool is_layer_available(const char* layer_name) const; @@ -377,7 +276,7 @@ class InstanceBuilder { explicit InstanceBuilder(PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr); // Create a VkInstance. Return an error if it failed. - Result<Instance> build() const; + Instance build() const; // Sets the name of the application. Defaults to "" if none is provided. InstanceBuilder& set_app_name(const char* app_name); @@ -615,14 +514,14 @@ class PhysicalDeviceSelector { // Return the first device which is suitable // use the `selection` parameter to configure if partially - Result<PhysicalDevice> select(DeviceSelectionMode selection = DeviceSelectionMode::partially_and_fully_suitable) const; + PhysicalDevice select(DeviceSelectionMode selection = DeviceSelectionMode::partially_and_fully_suitable) const; // Return all devices which are considered suitable - intended for applications which want to let the user pick the physical device - Result<std::vector<PhysicalDevice>> select_devices( + std::vector<PhysicalDevice> select_devices( DeviceSelectionMode selection = DeviceSelectionMode::partially_and_fully_suitable) const; // Return the names of all devices which are considered suitable - intended for applications which want to let the user pick the physical device - Result<std::vector<std::string>> select_device_names( + std::vector<std::string> select_device_names( DeviceSelectionMode selection = DeviceSelectionMode::partially_and_fully_suitable) const; // Set the surface in which the physical device should render to.
@@ -754,19 +653,12 @@ class PhysicalDeviceSelector { PhysicalDevice::Suitable is_device_suitable(PhysicalDevice const& phys_device) const; - Result<std::vector<PhysicalDevice>> select_impl(DeviceSelectionMode selection) const; + std::vector<PhysicalDevice> select_impl(DeviceSelectionMode selection) const; }; // ---- Queue ---- // enum class QueueType { present, graphics, compute, transfer }; -GFXRECON_BEGIN_NAMESPACE(detail) - -// Sentinel value, used in implementation only -inline const uint32_t QUEUE_INDEX_MAX_VALUE = 65536; - -GFXRECON_END_NAMESPACE(detail) - // ---- Device ---- // struct Device { @@ -778,13 +670,13 @@ struct Device { PFN_vkGetDeviceProcAddr fp_vkGetDeviceProcAddr = nullptr; uint32_t instance_version = VKB_VK_API_VERSION_1_0; - Result<uint32_t> get_queue_index(QueueType type) const; + std::optional<uint32_t> get_queue_index(QueueType type) const; // Only a compute or transfer queue type is valid. All other queue types do not support a 'dedicated' queue index - Result<uint32_t> get_dedicated_queue_index(QueueType type) const; + std::optional<uint32_t> get_dedicated_queue_index(QueueType type) const; - Result<VkQueue> get_queue(QueueType type) const; + std::optional<VkQueue> get_queue(QueueType type) const; // Only a compute or transfer queue type is valid. All other queue types do not support a 'dedicated' queue - Result<VkQueue> get_dedicated_queue(QueueType type) const; + std::optional<VkQueue> get_dedicated_queue(QueueType type) const; // Return a loaded dispatch table DispatchTable make_table() const; @@ -817,7 +709,7 @@ class DeviceBuilder { // Any features and extensions that are requested/required in PhysicalDeviceSelector are automatically enabled. explicit DeviceBuilder(PhysicalDevice physical_device); - Result<Device> build() const; + Device build() const; // For Advanced Users: specify the exact list of VkDeviceQueueCreateInfo's needed for the application. // If a custom queue setup is provided, getting the queues and queue indexes is up to the application. @@ -859,13 +751,13 @@ struct Swapchain { VkAllocationCallbacks* allocation_callbacks = VK_NULL_HANDLE; // Returns a vector of VkImage handles to the swapchain. - Result<std::vector<VkImage>> get_images(); + std::vector<VkImage> get_images(); // Returns a vector of VkImageView's to the VkImage's of the swapchain. // VkImageViews must be destroyed. The pNext chain must be a nullptr or a valid // structure. - Result<std::vector<VkImageView>> get_image_views(); - Result<std::vector<VkImageView>> get_image_views(const void* pNext); + std::vector<VkImageView> get_image_views(); + std::vector<VkImageView> get_image_views(const void* pNext); void destroy_image_views(std::vector<VkImageView> const& image_views); // A conversion function which allows this Swapchain to be used @@ -897,10 +789,10 @@ class SwapchainBuilder { explicit SwapchainBuilder(VkPhysicalDevice const physical_device, VkDevice const device, VkSurfaceKHR const surface, - uint32_t graphics_queue_index = detail::QUEUE_INDEX_MAX_VALUE, - uint32_t present_queue_index = detail::QUEUE_INDEX_MAX_VALUE); + std::optional<uint32_t> graphics_queue_index = {}, + std::optional<uint32_t> present_queue_index = {}); - Result<Swapchain> build() const; + Swapchain build() const; // Set the oldSwapchain member of VkSwapchainCreateInfoKHR. // For use in rebuilding a swapchain.
@@ -1003,8 +895,8 @@ class SwapchainBuilder { uint32_t min_image_count = 0; uint32_t required_min_image_count = 0; VkImageUsageFlags image_usage_flags = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; - uint32_t graphics_queue_index = 0; - uint32_t present_queue_index = 0; + std::optional<uint32_t> graphics_queue_index = {}; + std::optional<uint32_t> present_queue_index = {}; VkSurfaceTransformFlagBitsKHR pre_transform = static_cast<VkSurfaceTransformFlagBitsKHR>(0); #if defined(__ANDROID__) VkCompositeAlphaFlagBitsKHR composite_alpha = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR; @@ -1018,16 +910,12 @@ class SwapchainBuilder { } info; }; -struct Void {}; -typedef Result<Void> VoidResult; -const Void SUCCESS = Void{}; - -Result<SDL_Window*> create_window_sdl(const char* window_name, bool resizable, int width, int height); +SDL_Window* create_window_sdl(const char* window_name, bool resizable, int width, int height); void destroy_window_sdl(SDL_Window * window); -Result<VkSurfaceKHR> create_surface_sdl(VkInstance instance, SDL_Window * window, VkAllocationCallbacks* allocator = nullptr); -VoidResult create_swapchain(Device const&, Swapchain& swapchain); +VkSurfaceKHR create_surface_sdl(VkInstance instance, SDL_Window * window, VkAllocationCallbacks* allocator = nullptr); +void create_swapchain(Device const&, Swapchain& swapchain); -Result<VkCommandPool> create_command_pool(DispatchTable const& disp, uint32_t queue_family_index); +VkCommandPool create_command_pool(DispatchTable const& disp, uint32_t queue_family_index); struct Sync { std::vector<VkSemaphore> available_semaphores; std::vector<VkSemaphore> finished_semaphore; std::vector<VkFence> in_flight_fences; std::vector<VkFence> image_in_flight; @@ -1045,27 +933,18 @@ struct Sync { Sync& operator =(Sync&&) = default; }; -Result<Sync> create_sync_objects(Swapchain const& swapchain, DispatchTable const& disp, const int max_frames_in_flight); +Sync create_sync_objects(Swapchain const& swapchain, DispatchTable const& disp, const int max_frames_in_flight); std::vector<char> readFile(const std::string& filename); -Result<VkShaderModule> createShaderModule(DispatchTable const& disp, const std::vector<char>& code); +VkShaderModule createShaderModule(DispatchTable const& disp, const std::vector<char>& code); + +VkShaderModule readShaderFromFile(DispatchTable const& disp, const std::string& filename); -Result<VkShaderModule> readShaderFromFile(DispatchTable const& disp, const std::string& filename); +#define VERIFY_VK_RESULT(message, result) { if (result != VK_SUCCESS) throw gfxrecon::test::vulkan_exception(message, result); } GFXRECON_END_NAMESPACE(test) GFXRECON_END_NAMESPACE(gfxrecon) -namespace std { - -template <> struct is_error_code_enum<gfxrecon::test::InstanceError> : true_type {}; -template <> struct is_error_code_enum<gfxrecon::test::PhysicalDeviceError> : true_type {}; -template <> struct is_error_code_enum<gfxrecon::test::GeneralError> : true_type {}; -template <> struct is_error_code_enum<gfxrecon::test::QueueError> : true_type {}; -template <> struct is_error_code_enum<gfxrecon::test::DeviceError> : true_type {}; -template <> struct is_error_code_enum<gfxrecon::test::SwapchainError> : true_type {}; - -} // namespace std - #endif // GFXRECON_TEST_APP_BASE_H diff --git a/test/test_apps/triangle/triangle.cpp index 814d5cee13..1014c3f6c9 100644 --- a/test/test_apps/triangle/triangle.cpp +++ b/test/test_apps/triangle/triangle.cpp @@ -21,12 +21,8 @@ ** DEALINGS IN THE SOFTWARE.
*/ -#include - #include #include -#include -#include #include @@ -72,55 +68,36 @@ struct RenderData { gfxrecon::test::Sync sync; }; -gfxrecon::test::VoidResult device_initialization(Init& init) { - auto window_ret = gfxrecon::test::create_window_sdl("Vulkan Triangle", true, 1024, 1024); - if (!window_ret) return window_ret.error(); - init.window = window_ret.value(); +void device_initialization(Init& init) { + init.window = gfxrecon::test::create_window_sdl("Vulkan Triangle", true, 1024, 1024); gfxrecon::test::InstanceBuilder instance_builder; - auto instance_ret = instance_builder.use_default_debug_messenger().request_validation_layers().build(); - if (!instance_ret) return instance_ret.error(); - init.instance = instance_ret.value(); + init.instance = instance_builder.use_default_debug_messenger().request_validation_layers().build(); init.inst_disp = init.instance.make_table(); - auto surface_ret = gfxrecon::test::create_surface_sdl(init.instance, init.window); - if (!surface_ret) return surface_ret.error(); - init.surface = surface_ret.value(); + init.surface = gfxrecon::test::create_surface_sdl(init.instance, init.window); gfxrecon::test::PhysicalDeviceSelector phys_device_selector(init.instance); - auto phys_device_ret = phys_device_selector.set_surface(init.surface).select(); - if (!phys_device_ret) return phys_device_ret.error(); - gfxrecon::test::PhysicalDevice physical_device = phys_device_ret.value(); + auto physical_device = phys_device_selector.set_surface(init.surface).select(); gfxrecon::test::DeviceBuilder device_builder{ physical_device }; - auto device_ret = device_builder.build(); - if (!device_ret) return device_ret.error(); - init.device = device_ret.value(); + init.device = device_builder.build(); init.disp = init.device.make_table(); - - return gfxrecon::test::SUCCESS; } -int get_queues(Init& init, RenderData& data) { - auto gq = init.device.get_queue(gfxrecon::test::QueueType::graphics); - if (!gq.has_value()) { - std::cout << "failed to get graphics queue: " << gq.error().message() << "\n"; - return -1; - } - data.graphics_queue = gq.value(); +void get_queues(Init& init, RenderData& data) { + auto graphics_queue = init.device.get_queue(gfxrecon::test::QueueType::graphics); + if (!graphics_queue.has_value()) throw std::runtime_error("could not get graphics queue"); + data.graphics_queue = *graphics_queue; - auto pq = init.device.get_queue(gfxrecon::test::QueueType::present); - if (!pq.has_value()) { - std::cout << "failed to get present queue: " << pq.error().message() << "\n"; - return -1; - } - data.present_queue = pq.value(); - return 0; + auto present_queue = init.device.get_queue(gfxrecon::test::QueueType::present); + if (!present_queue.has_value()) throw std::runtime_error("could not get present queue"); + data.present_queue = *present_queue; } -int create_render_pass(Init& init, RenderData& data) { +void create_render_pass(Init& init, RenderData& data) { VkAttachmentDescription color_attachment = {}; color_attachment.format = init.swapchain.image_format; color_attachment.samples = VK_SAMPLE_COUNT_1_BIT; @@ -157,27 +134,13 @@ int create_render_pass(Init& init, RenderData& data) { render_pass_info.dependencyCount = 1; render_pass_info.pDependencies = &dependency; - if (init.disp.createRenderPass(&render_pass_info, nullptr, &data.render_pass) != VK_SUCCESS) { - std::cout << "failed to create render pass\n"; - return -1; // failed to create render pass! 
- } - return 0; + auto result = init.disp.createRenderPass(&render_pass_info, nullptr, &data.render_pass); + VERIFY_VK_RESULT("failed to create render pass", result); } -int create_graphics_pipeline(Init& init, RenderData& data) { - auto vert_module_ret = gfxrecon::test::readShaderFromFile(init.disp, "vert.spv"); - if (!vert_module_ret) { - std::cout << vert_module_ret.error().message() << "\n"; - return -1; - } - auto vert_module = vert_module_ret.value(); - - auto frag_module_ret = gfxrecon::test::readShaderFromFile(init.disp, "frag.spv"); - if (!frag_module_ret) { - std::cout << frag_module_ret.error().message() << "\n"; - return -1; - } - auto frag_module = frag_module_ret.value(); +void create_graphics_pipeline(Init& init, RenderData& data) { + auto vert_module = gfxrecon::test::readShaderFromFile(init.disp, "vert.spv"); + auto frag_module = gfxrecon::test::readShaderFromFile(init.disp, "frag.spv"); VkPipelineShaderStageCreateInfo vert_stage_info = {}; vert_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; @@ -258,10 +221,8 @@ int create_graphics_pipeline(Init& init, RenderData& data) { pipeline_layout_info.setLayoutCount = 0; pipeline_layout_info.pushConstantRangeCount = 0; - if (init.disp.createPipelineLayout(&pipeline_layout_info, nullptr, &data.pipeline_layout) != VK_SUCCESS) { - std::cout << "failed to create pipeline layout\n"; - return -1; // failed to create pipeline layout - } + auto result = init.disp.createPipelineLayout(&pipeline_layout_info, nullptr, &data.pipeline_layout); + VERIFY_VK_RESULT("failed to create pipeline layout", result); std::vector dynamic_states = { VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR }; @@ -286,17 +247,14 @@ int create_graphics_pipeline(Init& init, RenderData& data) { pipeline_info.subpass = 0; pipeline_info.basePipelineHandle = VK_NULL_HANDLE; - if (init.disp.createGraphicsPipelines(VK_NULL_HANDLE, 1, &pipeline_info, nullptr, &data.graphics_pipeline) != VK_SUCCESS) { - std::cout << "failed to create pipline\n"; - return -1; // failed to create graphics pipeline - } + result = init.disp.createGraphicsPipelines(VK_NULL_HANDLE, 1, &pipeline_info, nullptr, &data.graphics_pipeline); + VERIFY_VK_RESULT("failed to create graphics pipeline", result); init.disp.destroyShaderModule(frag_module, nullptr); init.disp.destroyShaderModule(vert_module, nullptr); - return 0; } -gfxrecon::test::VoidResult create_framebuffers( +void create_framebuffers( gfxrecon::test::Swapchain const& swapchain, gfxrecon::test::DispatchTable const& disp, std::vector& framebuffers, @@ -318,13 +276,11 @@ gfxrecon::test::VoidResult create_framebuffers( framebuffer_info.layers = 1; auto result = disp.createFramebuffer(&framebuffer_info, nullptr, &framebuffers[i]); - if (result != VK_SUCCESS) return gfxrecon::test::VoidResult{gfxrecon::test::GeneralError::unexpected, result}; + VERIFY_VK_RESULT("failed to create framebuffer", result); } - - return gfxrecon::test::SUCCESS; } -int create_command_buffers(Init& init, RenderData& data) { +void create_command_buffers(Init& init, RenderData& data) { data.command_buffers.resize(data.framebuffers.size()); VkCommandBufferAllocateInfo allocInfo = {}; @@ -333,17 +289,15 @@ int create_command_buffers(Init& init, RenderData& data) { allocInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY; allocInfo.commandBufferCount = (uint32_t)data.command_buffers.size(); - if (init.disp.allocateCommandBuffers(&allocInfo, data.command_buffers.data()) != VK_SUCCESS) { - return -1; // failed to allocate command buffers; - } + auto result = 
init.disp.allocateCommandBuffers(&allocInfo, data.command_buffers.data()); + VERIFY_VK_RESULT("failed to allocate command buffers", result); for (size_t i = 0; i < data.command_buffers.size(); i++) { VkCommandBufferBeginInfo begin_info = {}; begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; - if (init.disp.beginCommandBuffer(data.command_buffers[i], &begin_info) != VK_SUCCESS) { - return -1; // failed to begin recording command buffer - } + result = init.disp.beginCommandBuffer(data.command_buffers[i], &begin_info); + VERIFY_VK_RESULT("failed to create command buffer", result); VkRenderPassBeginInfo render_pass_info = {}; render_pass_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO; @@ -378,15 +332,12 @@ int create_command_buffers(Init& init, RenderData& data) { init.disp.cmdEndRenderPass(data.command_buffers[i]); - if (init.disp.endCommandBuffer(data.command_buffers[i]) != VK_SUCCESS) { - std::cout << "failed to record command buffer\n"; - return -1; // failed to record command buffer! - } + result = init.disp.endCommandBuffer(data.command_buffers[i]); + VERIFY_VK_RESULT("failed to end command buffer", result); } - return 0; } -int recreate_swapchain(Init& init, RenderData& data) { +void recreate_swapchain(Init& init, RenderData& data) { init.disp.deviceWaitIdle(); init.disp.destroyCommandPool(data.command_pool, nullptr); @@ -397,42 +348,27 @@ int recreate_swapchain(Init& init, RenderData& data) { init.swapchain.destroy_image_views(data.swapchain_image_views); - auto swapchain_ret = gfxrecon::test::create_swapchain(init.device, init.swapchain); - if (!swapchain_ret) - { - std::cout << swapchain_ret.error().message() << "\n"; - return -1; - } + gfxrecon::test::create_swapchain(init.device, init.swapchain); - data.swapchain_images = init.swapchain.get_images().value(); - data.swapchain_image_views = init.swapchain.get_image_views().value(); + data.swapchain_images = init.swapchain.get_images(); + data.swapchain_image_views = init.swapchain.get_image_views(); - auto framebuffer_ret = create_framebuffers( + create_framebuffers( init.swapchain, init.disp, data.framebuffers, data.swapchain_image_views, data.render_pass ); - if (!framebuffer_ret) { - std::cout << framebuffer_ret.error().message() << "\n"; - return -1; - } - auto queue_family_index = init.device.get_queue_index(gfxrecon::test::QueueType::graphics).value(); - auto command_pool_ret = gfxrecon::test::create_command_pool(init.disp, queue_family_index); - if (!command_pool_ret) - { - std::cout << command_pool_ret.error().message() << "\n"; - return -1; - } - data.command_pool = command_pool_ret.value(); + auto queue_family_index = init.device.get_queue_index(gfxrecon::test::QueueType::graphics); + if (!queue_family_index) throw std::runtime_error("could not find graphics queue"); + data.command_pool = gfxrecon::test::create_command_pool(init.disp, *queue_family_index); - if (0 != create_command_buffers(init, data)) return -1; - return 0; + create_command_buffers(init, data); } -int draw_frame(Init& init, RenderData& data) { +void draw_frame(Init& init, RenderData& data) { init.disp.waitForFences(1, &data.sync.in_flight_fences[data.current_frame], VK_TRUE, UINT64_MAX); uint32_t image_index = 0; @@ -442,8 +378,7 @@ int draw_frame(Init& init, RenderData& data) { if (result == VK_ERROR_OUT_OF_DATE_KHR) { return recreate_swapchain(init, data); } else if (result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR) { - std::cout << "failed to acquire swapchain image. 
Error " << result << "\n"; - return -1; + throw gfxrecon::test::vulkan_exception("failed to acquire next image", result); } if (data.sync.image_in_flight[image_index] != VK_NULL_HANDLE) { @@ -469,10 +404,8 @@ int draw_frame(Init& init, RenderData& data) { init.disp.resetFences(1, &data.sync.in_flight_fences[data.current_frame]); - if (init.disp.queueSubmit(data.graphics_queue, 1, &submitInfo, data.sync.in_flight_fences[data.current_frame]) != VK_SUCCESS) { - std::cout << "failed to submit draw command buffer\n"; - return -1; //"failed to submit draw command buffer - } + result = init.disp.queueSubmit(data.graphics_queue, 1, &submitInfo, data.sync.in_flight_fences[data.current_frame]); + VERIFY_VK_RESULT("failed to submit queue", result); VkPresentInfoKHR present_info = {}; present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; @@ -489,13 +422,10 @@ int draw_frame(Init& init, RenderData& data) { result = init.disp.queuePresentKHR(data.present_queue, &present_info); if (result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR) { return recreate_swapchain(init, data); - } else if (result != VK_SUCCESS) { - std::cout << "failed to present swapchain image\n"; - return -1; } + VERIFY_VK_RESULT("failed to present queue", result); data.current_frame = (data.current_frame + 1) % MAX_FRAMES_IN_FLIGHT; - return 0; } void cleanup(Init& init, RenderData& data) { @@ -526,62 +456,36 @@ void cleanup(Init& init, RenderData& data) { const int NUM_FRAMES = 10; -int run() { +void run() { Init init; RenderData render_data; - auto init_ret = device_initialization((init)); - if (!init_ret) - { - std::cout << init_ret.error().message() << "\n"; - return -1; - } + device_initialization((init)); - auto swapchain_ret = gfxrecon::test::create_swapchain(init.device, init.swapchain); - if (!swapchain_ret) - { - std::cout << swapchain_ret.error().message() << "\n"; - return -1; - } + gfxrecon::test::create_swapchain(init.device, init.swapchain); - if (0 != get_queues(init, render_data)) return -1; - if (0 != create_render_pass(init, render_data)) return -1; - if (0 != create_graphics_pipeline(init, render_data)) return -1; + get_queues(init, render_data); + create_render_pass(init, render_data); + create_graphics_pipeline(init, render_data); - render_data.swapchain_images = init.swapchain.get_images().value(); - render_data.swapchain_image_views = init.swapchain.get_image_views().value(); + render_data.swapchain_images = init.swapchain.get_images(); + render_data.swapchain_image_views = init.swapchain.get_image_views(); - auto framebuffer_ret = create_framebuffers( + create_framebuffers( init.swapchain, init.disp, render_data.framebuffers, render_data.swapchain_image_views, render_data.render_pass ); - if (!framebuffer_ret) - { - std::cout << framebuffer_ret.error().message() << "\n"; - return -1; - } - auto queue_family_index = init.device.get_queue_index(gfxrecon::test::QueueType::graphics).value(); - auto command_pool_ret = gfxrecon::test::create_command_pool(init.disp, queue_family_index); - if (!command_pool_ret) - { - std::cout << command_pool_ret.error().message() << "\n"; - return -1; - } - render_data.command_pool = command_pool_ret.value(); + auto queue_family_index = init.device.get_queue_index(gfxrecon::test::QueueType::graphics); + if (!queue_family_index) throw std::runtime_error("could not find graphics queue"); + render_data.command_pool = gfxrecon::test::create_command_pool(init.disp, *queue_family_index); - if (0 != create_command_buffers(init, render_data)) return -1; + 
create_command_buffers(init, render_data); - auto sync_ret = gfxrecon::test::create_sync_objects(init.swapchain, init.disp, MAX_FRAMES_IN_FLIGHT); - if (!sync_ret) - { - std::cout << command_pool_ret.error().message() << "\n"; - return -1; - } - render_data.sync = std::move(sync_ret.value()); + render_data.sync = gfxrecon::test::create_sync_objects(init.swapchain, init.disp, MAX_FRAMES_IN_FLIGHT); for (int frame = 0; frame < NUM_FRAMES; frame++) { SDL_Event windowEvent; @@ -591,17 +495,12 @@ int run() { } } - int res = draw_frame(init, render_data); - if (res != 0) { - std::cout << "failed to draw frame \n"; - return -1; - } + draw_frame(init, render_data); } init.disp.deviceWaitIdle(); cleanup(init, render_data); - return 0; } GFXRECON_END_NAMESPACE(triangle) @@ -611,5 +510,11 @@ GFXRECON_END_NAMESPACE(test_app) GFXRECON_END_NAMESPACE(gfxrecon) int main(int argc, char *argv[]) { - return gfxrecon::test_app::triangle::run(); + try { + gfxrecon::test_app::triangle::run(); + return 0; + } catch (std::exception e) { + std::cout << e.what() << std::endl; + return -1; + } } From b80ecdcc43e3823aeb67319899246f7332760f2d Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Mon, 21 Oct 2024 14:24:07 -0400 Subject: [PATCH 12/70] Move init to test app base --- test/test_apps/common/test_app_base.cpp | 23 ++++++++++++ test/test_apps/common/test_app_base.h | 12 ++++++ test/test_apps/triangle/triangle.cpp | 49 +++++-------------------- 3 files changed, 44 insertions(+), 40 deletions(-) diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index d7b237072b..3b42ef76f7 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -2351,6 +2351,29 @@ std::exception sdl_exception() { return std::runtime_error(SDL_GetError()); } +Init device_initialization(const std::string& window_name) { + Init init; + + init.window = gfxrecon::test::create_window_sdl(window_name.data(), true, 1024, 1024); + + gfxrecon::test::InstanceBuilder instance_builder; + init.instance = instance_builder.use_default_debug_messenger().request_validation_layers().build(); + + init.inst_disp = init.instance.make_table(); + + init.surface = gfxrecon::test::create_surface_sdl(init.instance, init.window); + + gfxrecon::test::PhysicalDeviceSelector phys_device_selector(init.instance); + auto physical_device = phys_device_selector.set_surface(init.surface).select(); + + gfxrecon::test::DeviceBuilder device_builder{ physical_device }; + init.device = device_builder.build(); + + init.disp = init.device.make_table(); + + return init; +} + GFXRECON_END_NAMESPACE(test) GFXRECON_END_NAMESPACE(gfxrecon) diff --git a/test/test_apps/common/test_app_base.h b/test/test_apps/common/test_app_base.h index b743a2f50e..f886b9df64 100644 --- a/test/test_apps/common/test_app_base.h +++ b/test/test_apps/common/test_app_base.h @@ -943,6 +943,18 @@ VkShaderModule readShaderFromFile(DispatchTable const& disp, const std::string& #define VERIFY_VK_RESULT(message, result) { if (result != VK_SUCCESS) throw gfxrecon::test::vulkan_exception(message, result); } +struct Init { + SDL_Window* window; + Instance instance; + InstanceDispatchTable inst_disp; + VkSurfaceKHR surface; + Device device; + DispatchTable disp; + Swapchain swapchain; +}; + +Init device_initialization(const std::string& window_name); + GFXRECON_END_NAMESPACE(test) GFXRECON_END_NAMESPACE(gfxrecon) diff --git a/test/test_apps/triangle/triangle.cpp b/test/test_apps/triangle/triangle.cpp index 1014c3f6c9..c6f01f7150 100644 --- 
a/test/test_apps/triangle/triangle.cpp +++ b/test/test_apps/triangle/triangle.cpp @@ -38,16 +38,6 @@ GFXRECON_BEGIN_NAMESPACE(triangle) const int MAX_FRAMES_IN_FLIGHT = 2; -struct Init { - SDL_Window* window; - gfxrecon::test::Instance instance; - gfxrecon::test::InstanceDispatchTable inst_disp; - VkSurfaceKHR surface; - gfxrecon::test::Device device; - gfxrecon::test::DispatchTable disp; - gfxrecon::test::Swapchain swapchain; -}; - struct RenderData { VkQueue graphics_queue; VkQueue present_queue; @@ -68,26 +58,7 @@ struct RenderData { gfxrecon::test::Sync sync; }; -void device_initialization(Init& init) { - init.window = gfxrecon::test::create_window_sdl("Vulkan Triangle", true, 1024, 1024); - - gfxrecon::test::InstanceBuilder instance_builder; - init.instance = instance_builder.use_default_debug_messenger().request_validation_layers().build(); - - init.inst_disp = init.instance.make_table(); - - init.surface = gfxrecon::test::create_surface_sdl(init.instance, init.window); - - gfxrecon::test::PhysicalDeviceSelector phys_device_selector(init.instance); - auto physical_device = phys_device_selector.set_surface(init.surface).select(); - - gfxrecon::test::DeviceBuilder device_builder{ physical_device }; - init.device = device_builder.build(); - - init.disp = init.device.make_table(); -} - -void get_queues(Init& init, RenderData& data) { +void get_queues(gfxrecon::test::Init& init, RenderData& data) { auto graphics_queue = init.device.get_queue(gfxrecon::test::QueueType::graphics); if (!graphics_queue.has_value()) throw std::runtime_error("could not get graphics queue"); data.graphics_queue = *graphics_queue; @@ -97,7 +68,7 @@ void get_queues(Init& init, RenderData& data) { data.present_queue = *present_queue; } -void create_render_pass(Init& init, RenderData& data) { +void create_render_pass(gfxrecon::test::Init& init, RenderData& data) { VkAttachmentDescription color_attachment = {}; color_attachment.format = init.swapchain.image_format; color_attachment.samples = VK_SAMPLE_COUNT_1_BIT; @@ -138,7 +109,7 @@ void create_render_pass(Init& init, RenderData& data) { VERIFY_VK_RESULT("failed to create render pass", result); } -void create_graphics_pipeline(Init& init, RenderData& data) { +void create_graphics_pipeline(gfxrecon::test::Init& init, RenderData& data) { auto vert_module = gfxrecon::test::readShaderFromFile(init.disp, "vert.spv"); auto frag_module = gfxrecon::test::readShaderFromFile(init.disp, "frag.spv"); @@ -280,7 +251,7 @@ void create_framebuffers( } } -void create_command_buffers(Init& init, RenderData& data) { +void create_command_buffers(gfxrecon::test::Init& init, RenderData& data) { data.command_buffers.resize(data.framebuffers.size()); VkCommandBufferAllocateInfo allocInfo = {}; @@ -337,7 +308,7 @@ void create_command_buffers(Init& init, RenderData& data) { } } -void recreate_swapchain(Init& init, RenderData& data) { +void recreate_swapchain(gfxrecon::test::Init& init, RenderData& data) { init.disp.deviceWaitIdle(); init.disp.destroyCommandPool(data.command_pool, nullptr); @@ -368,7 +339,7 @@ void recreate_swapchain(Init& init, RenderData& data) { create_command_buffers(init, data); } -void draw_frame(Init& init, RenderData& data) { +void draw_frame(gfxrecon::test::Init& init, RenderData& data) { init.disp.waitForFences(1, &data.sync.in_flight_fences[data.current_frame], VK_TRUE, UINT64_MAX); uint32_t image_index = 0; @@ -428,7 +399,7 @@ void draw_frame(Init& init, RenderData& data) { data.current_frame = (data.current_frame + 1) % MAX_FRAMES_IN_FLIGHT; } -void cleanup(Init& 
init, RenderData& data) { +void cleanup(gfxrecon::test::Init& init, RenderData& data) { for (size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { init.disp.destroySemaphore(data.sync.finished_semaphore[i], nullptr); init.disp.destroySemaphore(data.sync.available_semaphores[i], nullptr); @@ -457,13 +428,11 @@ void cleanup(Init& init, RenderData& data) { const int NUM_FRAMES = 10; void run() { - Init init; - RenderData render_data; - - device_initialization((init)); + auto init = gfxrecon::test::device_initialization("triangle"); gfxrecon::test::create_swapchain(init.device, init.swapchain); + RenderData render_data; get_queues(init, render_data); create_render_pass(init, render_data); create_graphics_pipeline(init, render_data); From fe7a1708cbcdec4ed5b6d628084e3b50f1da4240 Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Mon, 21 Oct 2024 14:29:51 -0400 Subject: [PATCH 13/70] Simplify create framebuffers --- test/test_apps/triangle/triangle.cpp | 58 +++++++++------------------- 1 file changed, 19 insertions(+), 39 deletions(-) diff --git a/test/test_apps/triangle/triangle.cpp b/test/test_apps/triangle/triangle.cpp index c6f01f7150..2021883d4e 100644 --- a/test/test_apps/triangle/triangle.cpp +++ b/test/test_apps/triangle/triangle.cpp @@ -58,16 +58,6 @@ struct RenderData { gfxrecon::test::Sync sync; }; -void get_queues(gfxrecon::test::Init& init, RenderData& data) { - auto graphics_queue = init.device.get_queue(gfxrecon::test::QueueType::graphics); - if (!graphics_queue.has_value()) throw std::runtime_error("could not get graphics queue"); - data.graphics_queue = *graphics_queue; - - auto present_queue = init.device.get_queue(gfxrecon::test::QueueType::present); - if (!present_queue.has_value()) throw std::runtime_error("could not get present queue"); - data.present_queue = *present_queue; -} - void create_render_pass(gfxrecon::test::Init& init, RenderData& data) { VkAttachmentDescription color_attachment = {}; color_attachment.format = init.swapchain.image_format; @@ -225,28 +215,22 @@ void create_graphics_pipeline(gfxrecon::test::Init& init, RenderData& data) { init.disp.destroyShaderModule(vert_module, nullptr); } -void create_framebuffers( - gfxrecon::test::Swapchain const& swapchain, - gfxrecon::test::DispatchTable const& disp, - std::vector& framebuffers, - std::vector& swapchain_image_views, - VkRenderPass render_pass -) { - framebuffers.resize(swapchain_image_views.size()); +void create_framebuffers(gfxrecon::test::Init const& init, RenderData& data) { + data.framebuffers.resize(data.swapchain_image_views.size()); - for (size_t i = 0; i < swapchain_image_views.size(); i++) { - VkImageView attachments[] = { swapchain_image_views[i] }; + for (size_t i = 0; i < data.swapchain_image_views.size(); i++) { + VkImageView attachments[] = { data.swapchain_image_views[i] }; VkFramebufferCreateInfo framebuffer_info = {}; framebuffer_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO; - framebuffer_info.renderPass = render_pass; + framebuffer_info.renderPass = data.render_pass; framebuffer_info.attachmentCount = 1; framebuffer_info.pAttachments = attachments; - framebuffer_info.width = swapchain.extent.width; - framebuffer_info.height = swapchain.extent.height; + framebuffer_info.width = init.swapchain.extent.width; + framebuffer_info.height = init.swapchain.extent.height; framebuffer_info.layers = 1; - auto result = disp.createFramebuffer(&framebuffer_info, nullptr, &framebuffers[i]); + auto result = init.disp.createFramebuffer(&framebuffer_info, nullptr, &data.framebuffers[i]); 
VERIFY_VK_RESULT("failed to create framebuffer", result); } } @@ -324,13 +308,7 @@ void recreate_swapchain(gfxrecon::test::Init& init, RenderData& data) { data.swapchain_images = init.swapchain.get_images(); data.swapchain_image_views = init.swapchain.get_image_views(); - create_framebuffers( - init.swapchain, - init.disp, - data.framebuffers, - data.swapchain_image_views, - data.render_pass - ); + create_framebuffers(init, data); auto queue_family_index = init.device.get_queue_index(gfxrecon::test::QueueType::graphics); if (!queue_family_index) throw std::runtime_error("could not find graphics queue"); @@ -433,20 +411,22 @@ void run() { gfxrecon::test::create_swapchain(init.device, init.swapchain); RenderData render_data; - get_queues(init, render_data); + + auto graphics_queue = init.device.get_queue(gfxrecon::test::QueueType::graphics); + if (!graphics_queue.has_value()) throw std::runtime_error("could not get graphics queue"); + render_data.graphics_queue = *graphics_queue; + + auto present_queue = init.device.get_queue(gfxrecon::test::QueueType::present); + if (!present_queue.has_value()) throw std::runtime_error("could not get present queue"); + render_data.present_queue = *present_queue; + create_render_pass(init, render_data); create_graphics_pipeline(init, render_data); render_data.swapchain_images = init.swapchain.get_images(); render_data.swapchain_image_views = init.swapchain.get_image_views(); - create_framebuffers( - init.swapchain, - init.disp, - render_data.framebuffers, - render_data.swapchain_image_views, - render_data.render_pass - ); + create_framebuffers(init, render_data); auto queue_family_index = init.device.get_queue_index(gfxrecon::test::QueueType::graphics); if (!queue_family_index) throw std::runtime_error("could not find graphics queue"); From 3fb08a9dd9b815ac499d0be81f0cb77148993006 Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Mon, 21 Oct 2024 14:46:41 -0400 Subject: [PATCH 14/70] Move swapchain creation for init to test app base --- test/test_apps/common/test_app_base.cpp | 5 +++++ test/test_apps/common/test_app_base.h | 2 ++ test/test_apps/triangle/triangle.cpp | 21 +++++++-------------- 3 files changed, 14 insertions(+), 14 deletions(-) diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index 3b42ef76f7..acdebd104e 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -2371,6 +2371,11 @@ Init device_initialization(const std::string& window_name) { init.disp = init.device.make_table(); + gfxrecon::test::create_swapchain(init.device, init.swapchain); + + init.swapchain_images = init.swapchain.get_images(); + init.swapchain_image_views = init.swapchain.get_image_views(); + return init; } diff --git a/test/test_apps/common/test_app_base.h b/test/test_apps/common/test_app_base.h index f886b9df64..048a0e9956 100644 --- a/test/test_apps/common/test_app_base.h +++ b/test/test_apps/common/test_app_base.h @@ -951,6 +951,8 @@ struct Init { Device device; DispatchTable disp; Swapchain swapchain; + std::vector swapchain_images; + std::vector swapchain_image_views; }; Init device_initialization(const std::string& window_name); diff --git a/test/test_apps/triangle/triangle.cpp b/test/test_apps/triangle/triangle.cpp index 2021883d4e..a9ea4a5e0e 100644 --- a/test/test_apps/triangle/triangle.cpp +++ b/test/test_apps/triangle/triangle.cpp @@ -42,8 +42,6 @@ struct RenderData { VkQueue graphics_queue; VkQueue present_queue; - std::vector swapchain_images; - std::vector 
swapchain_image_views; std::vector framebuffers; VkRenderPass render_pass; @@ -216,10 +214,10 @@ void create_graphics_pipeline(gfxrecon::test::Init& init, RenderData& data) { } void create_framebuffers(gfxrecon::test::Init const& init, RenderData& data) { - data.framebuffers.resize(data.swapchain_image_views.size()); + data.framebuffers.resize(init.swapchain_image_views.size()); - for (size_t i = 0; i < data.swapchain_image_views.size(); i++) { - VkImageView attachments[] = { data.swapchain_image_views[i] }; + for (size_t i = 0; i < init.swapchain_image_views.size(); i++) { + VkImageView attachments[] = { init.swapchain_image_views[i] }; VkFramebufferCreateInfo framebuffer_info = {}; framebuffer_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO; @@ -301,12 +299,12 @@ void recreate_swapchain(gfxrecon::test::Init& init, RenderData& data) { init.disp.destroyFramebuffer(framebuffer, nullptr); } - init.swapchain.destroy_image_views(data.swapchain_image_views); + init.swapchain.destroy_image_views(init.swapchain_image_views); gfxrecon::test::create_swapchain(init.device, init.swapchain); - data.swapchain_images = init.swapchain.get_images(); - data.swapchain_image_views = init.swapchain.get_image_views(); + init.swapchain_images = init.swapchain.get_images(); + init.swapchain_image_views = init.swapchain.get_image_views(); create_framebuffers(init, data); @@ -394,7 +392,7 @@ void cleanup(gfxrecon::test::Init& init, RenderData& data) { init.disp.destroyPipelineLayout(data.pipeline_layout, nullptr); init.disp.destroyRenderPass(data.render_pass, nullptr); - init.swapchain.destroy_image_views(data.swapchain_image_views); + init.swapchain.destroy_image_views(init.swapchain_image_views); gfxrecon::test::destroy_swapchain(init.swapchain); gfxrecon::test::destroy_device(init.device); @@ -408,8 +406,6 @@ const int NUM_FRAMES = 10; void run() { auto init = gfxrecon::test::device_initialization("triangle"); - gfxrecon::test::create_swapchain(init.device, init.swapchain); - RenderData render_data; auto graphics_queue = init.device.get_queue(gfxrecon::test::QueueType::graphics); @@ -423,9 +419,6 @@ void run() { create_render_pass(init, render_data); create_graphics_pipeline(init, render_data); - render_data.swapchain_images = init.swapchain.get_images(); - render_data.swapchain_image_views = init.swapchain.get_image_views(); - create_framebuffers(init, render_data); auto queue_family_index = init.device.get_queue_index(gfxrecon::test::QueueType::graphics); From 6f06e314a50097efca69c69c30b18491f95993f4 Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Mon, 21 Oct 2024 14:53:57 -0400 Subject: [PATCH 15/70] Test app base version of recreate swapchain --- test/test_apps/common/test_app_base.cpp | 21 ++++++++++++++ test/test_apps/common/test_app_base.h | 4 +++ test/test_apps/triangle/triangle.cpp | 37 +++++++++---------------- 3 files changed, 38 insertions(+), 24 deletions(-) diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index acdebd104e..ada3eb7a3d 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -2379,6 +2379,27 @@ Init device_initialization(const std::string& window_name) { return init; } +void cleanup(gfxrecon::test::Init& init) { + init.swapchain.destroy_image_views(init.swapchain_image_views); + + gfxrecon::test::destroy_swapchain(init.swapchain); + gfxrecon::test::destroy_device(init.device); + gfxrecon::test::destroy_surface(init.instance, init.surface); + 
gfxrecon::test::destroy_instance(init.instance); + gfxrecon::test::destroy_window_sdl(init.window); +} + +void recreate_swapchain(gfxrecon::test::Init& init, bool wait_for_idle) { + if (wait_for_idle) init.disp.deviceWaitIdle(); + + init.swapchain.destroy_image_views(init.swapchain_image_views); + + gfxrecon::test::create_swapchain(init.device, init.swapchain); + + init.swapchain_images = init.swapchain.get_images(); + init.swapchain_image_views = init.swapchain.get_image_views(); +} + GFXRECON_END_NAMESPACE(test) GFXRECON_END_NAMESPACE(gfxrecon) diff --git a/test/test_apps/common/test_app_base.h b/test/test_apps/common/test_app_base.h index 048a0e9956..599266e38d 100644 --- a/test/test_apps/common/test_app_base.h +++ b/test/test_apps/common/test_app_base.h @@ -957,6 +957,10 @@ struct Init { Init device_initialization(const std::string& window_name); +void cleanup(Init& init); + +void recreate_swapchain(Init& init, bool wait_for_idle = true); + GFXRECON_END_NAMESPACE(test) GFXRECON_END_NAMESPACE(gfxrecon) diff --git a/test/test_apps/triangle/triangle.cpp b/test/test_apps/triangle/triangle.cpp index a9ea4a5e0e..18af7f93ac 100644 --- a/test/test_apps/triangle/triangle.cpp +++ b/test/test_apps/triangle/triangle.cpp @@ -299,12 +299,7 @@ void recreate_swapchain(gfxrecon::test::Init& init, RenderData& data) { init.disp.destroyFramebuffer(framebuffer, nullptr); } - init.swapchain.destroy_image_views(init.swapchain_image_views); - - gfxrecon::test::create_swapchain(init.device, init.swapchain); - - init.swapchain_images = init.swapchain.get_images(); - init.swapchain_image_views = init.swapchain.get_image_views(); + gfxrecon::test::recreate_swapchain(init, false); create_framebuffers(init, data); @@ -375,30 +370,22 @@ void draw_frame(gfxrecon::test::Init& init, RenderData& data) { data.current_frame = (data.current_frame + 1) % MAX_FRAMES_IN_FLIGHT; } -void cleanup(gfxrecon::test::Init& init, RenderData& data) { +void cleanup(gfxrecon::test::DispatchTable const& disp, RenderData& data) { for (size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { - init.disp.destroySemaphore(data.sync.finished_semaphore[i], nullptr); - init.disp.destroySemaphore(data.sync.available_semaphores[i], nullptr); - init.disp.destroyFence(data.sync.in_flight_fences[i], nullptr); + disp.destroySemaphore(data.sync.finished_semaphore[i], nullptr); + disp.destroySemaphore(data.sync.available_semaphores[i], nullptr); + disp.destroyFence(data.sync.in_flight_fences[i], nullptr); } - init.disp.destroyCommandPool(data.command_pool, nullptr); + disp.destroyCommandPool(data.command_pool, nullptr); for (auto framebuffer : data.framebuffers) { - init.disp.destroyFramebuffer(framebuffer, nullptr); + disp.destroyFramebuffer(framebuffer, nullptr); } - init.disp.destroyPipeline(data.graphics_pipeline, nullptr); - init.disp.destroyPipelineLayout(data.pipeline_layout, nullptr); - init.disp.destroyRenderPass(data.render_pass, nullptr); - - init.swapchain.destroy_image_views(init.swapchain_image_views); - - gfxrecon::test::destroy_swapchain(init.swapchain); - gfxrecon::test::destroy_device(init.device); - gfxrecon::test::destroy_surface(init.instance, init.surface); - gfxrecon::test::destroy_instance(init.instance); - gfxrecon::test::destroy_window_sdl(init.window); + disp.destroyPipeline(data.graphics_pipeline, nullptr); + disp.destroyPipelineLayout(data.pipeline_layout, nullptr); + disp.destroyRenderPass(data.render_pass, nullptr); } const int NUM_FRAMES = 10; @@ -442,7 +429,9 @@ void run() { init.disp.deviceWaitIdle(); - cleanup(init, 
render_data); + cleanup(init.disp, render_data); + + gfxrecon::test::cleanup(init); } GFXRECON_END_NAMESPACE(triangle) From 6d8d3a3205b03b83f30c82741bca4272d901eb18 Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Mon, 21 Oct 2024 15:15:37 -0400 Subject: [PATCH 16/70] Test app base class --- test/test_apps/common/test_app_base.cpp | 34 ++++- test/test_apps/common/test_app_base.h | 15 +- test/test_apps/triangle/triangle.cpp | 187 ++++++++++++------------ 3 files changed, 142 insertions(+), 94 deletions(-) diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index ada3eb7a3d..68f121eb2f 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -2379,7 +2379,7 @@ Init device_initialization(const std::string& window_name) { return init; } -void cleanup(gfxrecon::test::Init& init) { +void cleanup_init(gfxrecon::test::Init& init) { init.swapchain.destroy_image_views(init.swapchain_image_views); gfxrecon::test::destroy_swapchain(init.swapchain); @@ -2400,6 +2400,38 @@ void recreate_swapchain(gfxrecon::test::Init& init, bool wait_for_idle) { init.swapchain_image_views = init.swapchain.get_image_views(); } +void TestAppBase::run(const std::string& window_name) +{ + init = device_initialization(window_name); + + this->setup(); + + bool running = true; + int frame_num = 0; + while (running) { + SDL_Event windowEvent; + while (SDL_PollEvent(&windowEvent)) { + if (windowEvent.type == SDL_EVENT_QUIT) { + break; + } + } + + running = frame(frame_num); + ++frame_num; + } + + this->init.disp.deviceWaitIdle(); + + this->cleanup(); + + cleanup_init(init); +} + +void TestAppBase::setup() {} +void TestAppBase::cleanup() {} + +TestAppBase::TestAppBase() {} + GFXRECON_END_NAMESPACE(test) GFXRECON_END_NAMESPACE(gfxrecon) diff --git a/test/test_apps/common/test_app_base.h b/test/test_apps/common/test_app_base.h index 599266e38d..3438ad55fb 100644 --- a/test/test_apps/common/test_app_base.h +++ b/test/test_apps/common/test_app_base.h @@ -957,10 +957,23 @@ struct Init { Init device_initialization(const std::string& window_name); -void cleanup(Init& init); +void cleanup_init(Init& init); void recreate_swapchain(Init& init, bool wait_for_idle = true); +class TestAppBase { + public: + void run(const std::string& window_name); + protected: + TestAppBase(); + + virtual void setup(); + virtual bool frame(const int frame_num) = 0; + virtual void cleanup(); + + Init init; +}; + GFXRECON_END_NAMESPACE(test) GFXRECON_END_NAMESPACE(gfxrecon) diff --git a/test/test_apps/triangle/triangle.cpp b/test/test_apps/triangle/triangle.cpp index 18af7f93ac..ffd7c73281 100644 --- a/test/test_apps/triangle/triangle.cpp +++ b/test/test_apps/triangle/triangle.cpp @@ -38,7 +38,10 @@ GFXRECON_BEGIN_NAMESPACE(triangle) const int MAX_FRAMES_IN_FLIGHT = 2; -struct RenderData { +class Triangle : public gfxrecon::test::TestAppBase { + public: + Triangle() : gfxrecon::test::TestAppBase() {} + private: VkQueue graphics_queue; VkQueue present_queue; @@ -54,9 +57,19 @@ struct RenderData { size_t current_frame = 0; gfxrecon::test::Sync sync; + + void create_render_pass(); + void create_graphics_pipeline(); + void create_framebuffers(); + void create_command_buffers(); + void recreate_swapchain(); + void draw_frame(); + void cleanup() override; + bool frame(const int frame_num) override; + void setup() override; }; -void create_render_pass(gfxrecon::test::Init& init, RenderData& data) { +void Triangle::create_render_pass() { VkAttachmentDescription color_attachment = 
{}; color_attachment.format = init.swapchain.image_format; color_attachment.samples = VK_SAMPLE_COUNT_1_BIT; @@ -93,11 +106,11 @@ void create_render_pass(gfxrecon::test::Init& init, RenderData& data) { render_pass_info.dependencyCount = 1; render_pass_info.pDependencies = &dependency; - auto result = init.disp.createRenderPass(&render_pass_info, nullptr, &data.render_pass); + auto result = init.disp.createRenderPass(&render_pass_info, nullptr, &this->render_pass); VERIFY_VK_RESULT("failed to create render pass", result); } -void create_graphics_pipeline(gfxrecon::test::Init& init, RenderData& data) { +void Triangle::create_graphics_pipeline() { auto vert_module = gfxrecon::test::readShaderFromFile(init.disp, "vert.spv"); auto frag_module = gfxrecon::test::readShaderFromFile(init.disp, "frag.spv"); @@ -180,7 +193,7 @@ void create_graphics_pipeline(gfxrecon::test::Init& init, RenderData& data) { pipeline_layout_info.setLayoutCount = 0; pipeline_layout_info.pushConstantRangeCount = 0; - auto result = init.disp.createPipelineLayout(&pipeline_layout_info, nullptr, &data.pipeline_layout); + auto result = init.disp.createPipelineLayout(&pipeline_layout_info, nullptr, &this->pipeline_layout); VERIFY_VK_RESULT("failed to create pipeline layout", result); std::vector dynamic_states = { VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR }; @@ -201,61 +214,61 @@ void create_graphics_pipeline(gfxrecon::test::Init& init, RenderData& data) { pipeline_info.pMultisampleState = &multisampling; pipeline_info.pColorBlendState = &color_blending; pipeline_info.pDynamicState = &dynamic_info; - pipeline_info.layout = data.pipeline_layout; - pipeline_info.renderPass = data.render_pass; + pipeline_info.layout = this->pipeline_layout; + pipeline_info.renderPass = this->render_pass; pipeline_info.subpass = 0; pipeline_info.basePipelineHandle = VK_NULL_HANDLE; - result = init.disp.createGraphicsPipelines(VK_NULL_HANDLE, 1, &pipeline_info, nullptr, &data.graphics_pipeline); + result = init.disp.createGraphicsPipelines(VK_NULL_HANDLE, 1, &pipeline_info, nullptr, &this->graphics_pipeline); VERIFY_VK_RESULT("failed to create graphics pipeline", result); init.disp.destroyShaderModule(frag_module, nullptr); init.disp.destroyShaderModule(vert_module, nullptr); } -void create_framebuffers(gfxrecon::test::Init const& init, RenderData& data) { - data.framebuffers.resize(init.swapchain_image_views.size()); +void Triangle::create_framebuffers() { + this->framebuffers.resize(init.swapchain_image_views.size()); for (size_t i = 0; i < init.swapchain_image_views.size(); i++) { VkImageView attachments[] = { init.swapchain_image_views[i] }; VkFramebufferCreateInfo framebuffer_info = {}; framebuffer_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO; - framebuffer_info.renderPass = data.render_pass; + framebuffer_info.renderPass = this->render_pass; framebuffer_info.attachmentCount = 1; framebuffer_info.pAttachments = attachments; framebuffer_info.width = init.swapchain.extent.width; framebuffer_info.height = init.swapchain.extent.height; framebuffer_info.layers = 1; - auto result = init.disp.createFramebuffer(&framebuffer_info, nullptr, &data.framebuffers[i]); + auto result = init.disp.createFramebuffer(&framebuffer_info, nullptr, &this->framebuffers[i]); VERIFY_VK_RESULT("failed to create framebuffer", result); } } -void create_command_buffers(gfxrecon::test::Init& init, RenderData& data) { - data.command_buffers.resize(data.framebuffers.size()); +void Triangle::create_command_buffers() { + 
this->command_buffers.resize(this->framebuffers.size()); VkCommandBufferAllocateInfo allocInfo = {}; allocInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO; - allocInfo.commandPool = data.command_pool; + allocInfo.commandPool = this->command_pool; allocInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY; - allocInfo.commandBufferCount = (uint32_t)data.command_buffers.size(); + allocInfo.commandBufferCount = (uint32_t)this->command_buffers.size(); - auto result = init.disp.allocateCommandBuffers(&allocInfo, data.command_buffers.data()); + auto result = init.disp.allocateCommandBuffers(&allocInfo, this->command_buffers.data()); VERIFY_VK_RESULT("failed to allocate command buffers", result); - for (size_t i = 0; i < data.command_buffers.size(); i++) { + for (size_t i = 0; i < this->command_buffers.size(); i++) { VkCommandBufferBeginInfo begin_info = {}; begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; - result = init.disp.beginCommandBuffer(data.command_buffers[i], &begin_info); + result = init.disp.beginCommandBuffer(this->command_buffers[i], &begin_info); VERIFY_VK_RESULT("failed to create command buffer", result); VkRenderPassBeginInfo render_pass_info = {}; render_pass_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO; - render_pass_info.renderPass = data.render_pass; - render_pass_info.framebuffer = data.framebuffers[i]; + render_pass_info.renderPass = this->render_pass; + render_pass_info.framebuffer = this->framebuffers[i]; render_pass_info.renderArea.offset = { 0, 0 }; render_pass_info.renderArea.extent = init.swapchain.extent; VkClearValue clearColor{ { { 0.0f, 0.0f, 0.0f, 1.0f } } }; @@ -274,79 +287,79 @@ void create_command_buffers(gfxrecon::test::Init& init, RenderData& data) { scissor.offset = { 0, 0 }; scissor.extent = init.swapchain.extent; - init.disp.cmdSetViewport(data.command_buffers[i], 0, 1, &viewport); - init.disp.cmdSetScissor(data.command_buffers[i], 0, 1, &scissor); + init.disp.cmdSetViewport(this->command_buffers[i], 0, 1, &viewport); + init.disp.cmdSetScissor(this->command_buffers[i], 0, 1, &scissor); - init.disp.cmdBeginRenderPass(data.command_buffers[i], &render_pass_info, VK_SUBPASS_CONTENTS_INLINE); + init.disp.cmdBeginRenderPass(this->command_buffers[i], &render_pass_info, VK_SUBPASS_CONTENTS_INLINE); - init.disp.cmdBindPipeline(data.command_buffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, data.graphics_pipeline); + init.disp.cmdBindPipeline(this->command_buffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, this->graphics_pipeline); - init.disp.cmdDraw(data.command_buffers[i], 3, 1, 0, 0); + init.disp.cmdDraw(this->command_buffers[i], 3, 1, 0, 0); - init.disp.cmdEndRenderPass(data.command_buffers[i]); + init.disp.cmdEndRenderPass(this->command_buffers[i]); - result = init.disp.endCommandBuffer(data.command_buffers[i]); + result = init.disp.endCommandBuffer(this->command_buffers[i]); VERIFY_VK_RESULT("failed to end command buffer", result); } } -void recreate_swapchain(gfxrecon::test::Init& init, RenderData& data) { +void Triangle::recreate_swapchain() { init.disp.deviceWaitIdle(); - init.disp.destroyCommandPool(data.command_pool, nullptr); + init.disp.destroyCommandPool(this->command_pool, nullptr); - for (auto framebuffer : data.framebuffers) { + for (auto framebuffer : this->framebuffers) { init.disp.destroyFramebuffer(framebuffer, nullptr); } gfxrecon::test::recreate_swapchain(init, false); - create_framebuffers(init, data); + create_framebuffers(); auto queue_family_index = init.device.get_queue_index(gfxrecon::test::QueueType::graphics); if 
(!queue_family_index) throw std::runtime_error("could not find graphics queue"); - data.command_pool = gfxrecon::test::create_command_pool(init.disp, *queue_family_index); + this->command_pool = gfxrecon::test::create_command_pool(init.disp, *queue_family_index); - create_command_buffers(init, data); + create_command_buffers(); } -void draw_frame(gfxrecon::test::Init& init, RenderData& data) { - init.disp.waitForFences(1, &data.sync.in_flight_fences[data.current_frame], VK_TRUE, UINT64_MAX); +void Triangle::draw_frame() { + init.disp.waitForFences(1, &this->sync.in_flight_fences[this->current_frame], VK_TRUE, UINT64_MAX); uint32_t image_index = 0; VkResult result = init.disp.acquireNextImageKHR( - init.swapchain, UINT64_MAX, data.sync.available_semaphores[data.current_frame], VK_NULL_HANDLE, &image_index); + init.swapchain, UINT64_MAX, this->sync.available_semaphores[this->current_frame], VK_NULL_HANDLE, &image_index); if (result == VK_ERROR_OUT_OF_DATE_KHR) { - return recreate_swapchain(init, data); + return recreate_swapchain(); } else if (result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR) { throw gfxrecon::test::vulkan_exception("failed to acquire next image", result); } - if (data.sync.image_in_flight[image_index] != VK_NULL_HANDLE) { - init.disp.waitForFences(1, &data.sync.image_in_flight[image_index], VK_TRUE, UINT64_MAX); + if (this->sync.image_in_flight[image_index] != VK_NULL_HANDLE) { + init.disp.waitForFences(1, &this->sync.image_in_flight[image_index], VK_TRUE, UINT64_MAX); } - data.sync.image_in_flight[image_index] = data.sync.in_flight_fences[data.current_frame]; + this->sync.image_in_flight[image_index] = this->sync.in_flight_fences[this->current_frame]; VkSubmitInfo submitInfo = {}; submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; - VkSemaphore wait_semaphores[] = { data.sync.available_semaphores[data.current_frame] }; + VkSemaphore wait_semaphores[] = { this->sync.available_semaphores[this->current_frame] }; VkPipelineStageFlags wait_stages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT }; submitInfo.waitSemaphoreCount = 1; submitInfo.pWaitSemaphores = wait_semaphores; submitInfo.pWaitDstStageMask = wait_stages; submitInfo.commandBufferCount = 1; - submitInfo.pCommandBuffers = &data.command_buffers[image_index]; + submitInfo.pCommandBuffers = &this->command_buffers[image_index]; - VkSemaphore signal_semaphores[] = { data.sync.finished_semaphore[data.current_frame] }; + VkSemaphore signal_semaphores[] = { this->sync.finished_semaphore[this->current_frame] }; submitInfo.signalSemaphoreCount = 1; submitInfo.pSignalSemaphores = signal_semaphores; - init.disp.resetFences(1, &data.sync.in_flight_fences[data.current_frame]); + init.disp.resetFences(1, &this->sync.in_flight_fences[this->current_frame]); - result = init.disp.queueSubmit(data.graphics_queue, 1, &submitInfo, data.sync.in_flight_fences[data.current_frame]); + result = init.disp.queueSubmit(this->graphics_queue, 1, &submitInfo, this->sync.in_flight_fences[this->current_frame]); VERIFY_VK_RESULT("failed to submit queue", result); VkPresentInfoKHR present_info = {}; @@ -361,77 +374,66 @@ void draw_frame(gfxrecon::test::Init& init, RenderData& data) { present_info.pImageIndices = &image_index; - result = init.disp.queuePresentKHR(data.present_queue, &present_info); + result = init.disp.queuePresentKHR(this->present_queue, &present_info); if (result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR) { - return recreate_swapchain(init, data); + return recreate_swapchain(); } VERIFY_VK_RESULT("failed to 
present queue", result); - data.current_frame = (data.current_frame + 1) % MAX_FRAMES_IN_FLIGHT; + this->current_frame = (this->current_frame + 1) % MAX_FRAMES_IN_FLIGHT; } -void cleanup(gfxrecon::test::DispatchTable const& disp, RenderData& data) { +void Triangle::cleanup() { for (size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { - disp.destroySemaphore(data.sync.finished_semaphore[i], nullptr); - disp.destroySemaphore(data.sync.available_semaphores[i], nullptr); - disp.destroyFence(data.sync.in_flight_fences[i], nullptr); + init.disp.destroySemaphore(this->sync.finished_semaphore[i], nullptr); + init.disp.destroySemaphore(this->sync.available_semaphores[i], nullptr); + init.disp.destroyFence(this->sync.in_flight_fences[i], nullptr); } - disp.destroyCommandPool(data.command_pool, nullptr); + init.disp.destroyCommandPool(this->command_pool, nullptr); - for (auto framebuffer : data.framebuffers) { - disp.destroyFramebuffer(framebuffer, nullptr); + for (auto framebuffer : this->framebuffers) { + init.disp.destroyFramebuffer(framebuffer, nullptr); } - disp.destroyPipeline(data.graphics_pipeline, nullptr); - disp.destroyPipelineLayout(data.pipeline_layout, nullptr); - disp.destroyRenderPass(data.render_pass, nullptr); + init.disp.destroyPipeline(this->graphics_pipeline, nullptr); + init.disp.destroyPipelineLayout(this->pipeline_layout, nullptr); + init.disp.destroyRenderPass(this->render_pass, nullptr); } const int NUM_FRAMES = 10; -void run() { - auto init = gfxrecon::test::device_initialization("triangle"); - - RenderData render_data; - +void Triangle::setup() +{ auto graphics_queue = init.device.get_queue(gfxrecon::test::QueueType::graphics); - if (!graphics_queue.has_value()) throw std::runtime_error("could not get graphics queue"); - render_data.graphics_queue = *graphics_queue; + if (!graphics_queue.has_value()) + throw std::runtime_error("could not get graphics queue"); + this->graphics_queue = *graphics_queue; auto present_queue = init.device.get_queue(gfxrecon::test::QueueType::present); - if (!present_queue.has_value()) throw std::runtime_error("could not get present queue"); - render_data.present_queue = *present_queue; + if (!present_queue.has_value()) + throw std::runtime_error("could not get present queue"); + this->present_queue = *present_queue; - create_render_pass(init, render_data); - create_graphics_pipeline(init, render_data); + create_render_pass(); + create_graphics_pipeline(); - create_framebuffers(init, render_data); + create_framebuffers(); auto queue_family_index = init.device.get_queue_index(gfxrecon::test::QueueType::graphics); - if (!queue_family_index) throw std::runtime_error("could not find graphics queue"); - render_data.command_pool = gfxrecon::test::create_command_pool(init.disp, *queue_family_index); - - create_command_buffers(init, render_data); + if (!queue_family_index) + throw std::runtime_error("could not find graphics queue"); + this->command_pool = gfxrecon::test::create_command_pool(init.disp, *queue_family_index); - render_data.sync = gfxrecon::test::create_sync_objects(init.swapchain, init.disp, MAX_FRAMES_IN_FLIGHT); + create_command_buffers(); - for (int frame = 0; frame < NUM_FRAMES; frame++) { - SDL_Event windowEvent; - while (SDL_PollEvent(&windowEvent)) { - if (windowEvent.type == SDL_EVENT_QUIT) { - break; - } - } - - draw_frame(init, render_data); - } - - init.disp.deviceWaitIdle(); - - cleanup(init.disp, render_data); + this->sync = gfxrecon::test::create_sync_objects(init.swapchain, init.disp, MAX_FRAMES_IN_FLIGHT); +} - 
gfxrecon::test::cleanup(init); +bool Triangle::frame(const int frame_num) +{ + draw_frame(); + return frame_num >= NUM_FRAMES; } GFXRECON_END_NAMESPACE(triangle) @@ -442,7 +444,8 @@ GFXRECON_END_NAMESPACE(gfxrecon) int main(int argc, char *argv[]) { try { - gfxrecon::test_app::triangle::run(); + gfxrecon::test_app::triangle::Triangle triangle{}; + triangle.run("triangle"); return 0; } catch (std::exception e) { std::cout << e.what() << std::endl; From d66b9b37b019893d38b3467cf244d73ccc81c358 Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Mon, 21 Oct 2024 15:38:36 -0400 Subject: [PATCH 17/70] Test apps can configure instance creation, physical device selection, and device creation --- test/test_apps/common/test_app_base.cpp | 59 ++++++++++++++++++++----- test/test_apps/common/test_app_base.h | 10 ++++- 2 files changed, 56 insertions(+), 13 deletions(-) diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index 68f121eb2f..af7c5d8fcc 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -2351,22 +2351,27 @@ std::exception sdl_exception() { return std::runtime_error(SDL_GetError()); } -Init device_initialization(const std::string& window_name) { - Init init; - +void device_initialization_phase_1(const std::string& window_name, Init& init) +{ init.window = gfxrecon::test::create_window_sdl(window_name.data(), true, 1024, 1024); +} - gfxrecon::test::InstanceBuilder instance_builder; - init.instance = instance_builder.use_default_debug_messenger().request_validation_layers().build(); +void device_initialization_phase_2(InstanceBuilder const& instance_builder, Init& init) +{ + init.instance = instance_builder.build(); init.inst_disp = init.instance.make_table(); init.surface = gfxrecon::test::create_surface_sdl(init.instance, init.window); +} - gfxrecon::test::PhysicalDeviceSelector phys_device_selector(init.instance); - auto physical_device = phys_device_selector.set_surface(init.surface).select(); +PhysicalDevice device_initialization_phase_3(PhysicalDeviceSelector& phys_device_selector, Init& init) +{ + return phys_device_selector.set_surface(init.surface).select(); +} - gfxrecon::test::DeviceBuilder device_builder{ physical_device }; +void device_initialization_phase_4(DeviceBuilder const& device_builder, Init& init) +{ init.device = device_builder.build(); init.disp = init.device.make_table(); @@ -2375,6 +2380,21 @@ Init device_initialization(const std::string& window_name) { init.swapchain_images = init.swapchain.get_images(); init.swapchain_image_views = init.swapchain.get_image_views(); +} + +Init device_initialization(const std::string& window_name) { + Init init; + + device_initialization_phase_1(window_name, init); + + gfxrecon::test::InstanceBuilder instance_builder; + device_initialization_phase_2(instance_builder, init); + + gfxrecon::test::PhysicalDeviceSelector phys_device_selector(init.instance); + auto physical_device = device_initialization_phase_3(phys_device_selector, init); + + gfxrecon::test::DeviceBuilder device_builder{ physical_device }; + device_initialization_phase_4(device_builder, init); return init; } @@ -2402,7 +2422,19 @@ void recreate_swapchain(gfxrecon::test::Init& init, bool wait_for_idle) { void TestAppBase::run(const std::string& window_name) { - init = device_initialization(window_name); + device_initialization_phase_1(window_name, this->init); + + gfxrecon::test::InstanceBuilder instance_builder; + this->configure_instance_builder(instance_builder); + 
device_initialization_phase_2(instance_builder, this->init); + + gfxrecon::test::PhysicalDeviceSelector phys_device_selector(this->init.instance); + this->configure_physical_device_selector(phys_device_selector); + auto physical_device = device_initialization_phase_3(phys_device_selector, this->init); + + gfxrecon::test::DeviceBuilder device_builder{ physical_device }; + this->configure_device_builder(device_builder, physical_device); + device_initialization_phase_4(device_builder, this->init); this->setup(); @@ -2424,13 +2456,16 @@ void TestAppBase::run(const std::string& window_name) this->cleanup(); - cleanup_init(init); + cleanup_init(this->init); } void TestAppBase::setup() {} void TestAppBase::cleanup() {} - -TestAppBase::TestAppBase() {} +void TestAppBase::configure_instance_builder(InstanceBuilder& instance_builder) { + instance_builder.use_default_debug_messenger().request_validation_layers(); +} +void TestAppBase::configure_physical_device_selector(PhysicalDeviceSelector& phys_device_selector) {} +void TestAppBase::configure_device_builder(DeviceBuilder& device_builder, PhysicalDevice const& physical_device) {} GFXRECON_END_NAMESPACE(test) diff --git a/test/test_apps/common/test_app_base.h b/test/test_apps/common/test_app_base.h index 3438ad55fb..9d47b5157e 100644 --- a/test/test_apps/common/test_app_base.h +++ b/test/test_apps/common/test_app_base.h @@ -965,11 +965,19 @@ class TestAppBase { public: void run(const std::string& window_name); protected: - TestAppBase(); + TestAppBase() = default; + ~TestAppBase() = default; + TestAppBase(const TestAppBase&) = delete; + TestAppBase& operator=(const TestAppBase&) = delete; + TestAppBase(TestAppBase&&) = delete; + TestAppBase& operator=(TestAppBase&&) = delete; virtual void setup(); virtual bool frame(const int frame_num) = 0; virtual void cleanup(); + virtual void configure_instance_builder(InstanceBuilder& instance_builder); + virtual void configure_physical_device_selector(PhysicalDeviceSelector& phys_device_selector); + virtual void configure_device_builder(DeviceBuilder& device_builder, PhysicalDevice const& physical_device); Init init; }; From 9b5e836532d3f211cf1f6d8f1b023e986813c59f Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Mon, 21 Oct 2024 15:44:11 -0400 Subject: [PATCH 18/70] Cleanup triangle --- test/test_apps/triangle/triangle.cpp | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/test/test_apps/triangle/triangle.cpp b/test/test_apps/triangle/triangle.cpp index ffd7c73281..69f798a520 100644 --- a/test/test_apps/triangle/triangle.cpp +++ b/test/test_apps/triangle/triangle.cpp @@ -323,7 +323,11 @@ void Triangle::recreate_swapchain() { create_command_buffers(); } -void Triangle::draw_frame() { +const int NUM_FRAMES = 10; +#define IS_DONE(frame_num) frame_num >= NUM_FRAMES; + +bool Triangle::frame(const int frame_num) +{ init.disp.waitForFences(1, &this->sync.in_flight_fences[this->current_frame], VK_TRUE, UINT64_MAX); uint32_t image_index = 0; @@ -331,7 +335,8 @@ void Triangle::draw_frame() { init.swapchain, UINT64_MAX, this->sync.available_semaphores[this->current_frame], VK_NULL_HANDLE, &image_index); if (result == VK_ERROR_OUT_OF_DATE_KHR) { - return recreate_swapchain(); + recreate_swapchain(); + return IS_DONE(frame_num); } else if (result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR) { throw gfxrecon::test::vulkan_exception("failed to acquire next image", result); } @@ -376,11 +381,14 @@ void Triangle::draw_frame() { result = init.disp.queuePresentKHR(this->present_queue, 
&present_info); if (result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR) { - return recreate_swapchain(); + recreate_swapchain(); + return frame_num >= NUM_FRAMES; } VERIFY_VK_RESULT("failed to present queue", result); this->current_frame = (this->current_frame + 1) % MAX_FRAMES_IN_FLIGHT; + + return IS_DONE(frame_num); } void Triangle::cleanup() { @@ -401,8 +409,6 @@ void Triangle::cleanup() { init.disp.destroyRenderPass(this->render_pass, nullptr); } -const int NUM_FRAMES = 10; - void Triangle::setup() { auto graphics_queue = init.device.get_queue(gfxrecon::test::QueueType::graphics); @@ -430,12 +436,6 @@ void Triangle::setup() this->sync = gfxrecon::test::create_sync_objects(init.swapchain, init.disp, MAX_FRAMES_IN_FLIGHT); } -bool Triangle::frame(const int frame_num) -{ - draw_frame(); - return frame_num >= NUM_FRAMES; -} - GFXRECON_END_NAMESPACE(triangle) GFXRECON_END_NAMESPACE(test_app) From f7de6955fa7e0a4beadee1429e8107d876fed73a Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Mon, 21 Oct 2024 15:49:58 -0400 Subject: [PATCH 19/70] Include cleanup --- test/test_apps/common/test_app_base.cpp | 32 ------------------------- test/test_apps/common/test_app_base.h | 11 ++++----- test/test_apps/triangle/triangle.cpp | 1 - 3 files changed, 5 insertions(+), 39 deletions(-) diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index af7c5d8fcc..796541a3d1 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -23,8 +23,6 @@ #include "test_app_base.h" -#include - #if defined(_WIN32) #include #ifndef NOMINMAX @@ -396,36 +394,6 @@ template void setup_pNext_chain(T& structure, std::vector(err)); } -}; -const InstanceErrorCategory instance_error_category; - -struct PhysicalDeviceErrorCategory : std::error_category { - const char* name() const noexcept override { return "gfxrecon_test_physical_device"; } - std::string message(int err) const override { return to_string(static_cast(err)); } -}; -const PhysicalDeviceErrorCategory physical_device_error_category; - -struct QueueErrorCategory : std::error_category { - const char* name() const noexcept override { return "gfxrecon_test_queue"; } - std::string message(int err) const override { return to_string(static_cast(err)); } -}; -const QueueErrorCategory queue_error_category; - -struct DeviceErrorCategory : std::error_category { - const char* name() const noexcept override { return "gfxrecon_test_device"; } - std::string message(int err) const override { return to_string(static_cast(err)); } -}; -const DeviceErrorCategory device_error_category; - -struct SwapchainErrorCategory : std::error_category { - const char* name() const noexcept override { return "gfxrecon_test_swapchain"; } - std::string message(int err) const override { return to_string(static_cast(err)); } -}; -const SwapchainErrorCategory swapchain_error_category; - GFXRECON_END_NAMESPACE(detail) #define CASE_TO_STRING(CATEGORY, TYPE) \ diff --git a/test/test_apps/common/test_app_base.h b/test/test_apps/common/test_app_base.h index 9d47b5157e..0659632767 100644 --- a/test/test_apps/common/test_app_base.h +++ b/test/test_apps/common/test_app_base.h @@ -24,24 +24,23 @@ #ifndef GFXRECON_TEST_APP_BASE_H #define GFXRECON_TEST_APP_BASE_H -#include -#include #include #include #include +#include +#include #include #include -#include #include -#include "test_app_dispatch.h" -#include "util/defines.h" - #include #include +#include "test_app_dispatch.h" +#include "util/defines.h" + #ifdef 
VK_MAKE_API_VERSION #define VKB_MAKE_VK_VERSION(variant, major, minor, patch) VK_MAKE_API_VERSION(variant, major, minor, patch) #elif defined(VK_MAKE_VERSION) diff --git a/test/test_apps/triangle/triangle.cpp b/test/test_apps/triangle/triangle.cpp index 69f798a520..1628bac554 100644 --- a/test/test_apps/triangle/triangle.cpp +++ b/test/test_apps/triangle/triangle.cpp @@ -21,7 +21,6 @@ ** DEALINGS IN THE SOFTWARE. */ -#include #include #include From 8aa0d84b1c2e28f9051bfd1da4ebf38b15bd5e54 Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Tue, 22 Oct 2024 15:34:49 -0400 Subject: [PATCH 20/70] Multisample depth test app --- .../vk_enum_string_helper.h | 41 +- test/test_apps/CMakeLists.txt | 1 + .../common/CurrentBuildVulkanVersion.cmake | 2 + test/test_apps/common/generate_dispatch.py | 422 +++++++++++ test/test_apps/common/test_app_base.cpp | 73 +- test/test_apps/common/test_app_base.h | 24 +- test/test_apps/common/test_app_dispatch.h | 105 ++- .../multisample-depth/CMakeLists.txt | 69 ++ test/test_apps/multisample-depth/app.cpp | 665 ++++++++++++++++++ .../multisample-depth/shaders/frag.spv | Bin 0 -> 608 bytes .../multisample-depth/shaders/vert.spv | Bin 0 -> 1540 bytes test/test_apps/triangle/CMakeLists.txt | 5 +- .../triangle/{triangle.cpp => app.cpp} | 27 +- 13 files changed, 1297 insertions(+), 137 deletions(-) create mode 100644 test/test_apps/common/CurrentBuildVulkanVersion.cmake create mode 100644 test/test_apps/common/generate_dispatch.py create mode 100644 test/test_apps/multisample-depth/CMakeLists.txt create mode 100644 test/test_apps/multisample-depth/app.cpp create mode 100644 test/test_apps/multisample-depth/shaders/frag.spv create mode 100644 test/test_apps/multisample-depth/shaders/vert.spv rename test/test_apps/triangle/{triangle.cpp => app.cpp} (96%) diff --git a/external/Vulkan-Utility-Libraries/vk_enum_string_helper.h b/external/Vulkan-Utility-Libraries/vk_enum_string_helper.h index eebd7468a2..6cd51f69ef 100644 --- a/external/Vulkan-Utility-Libraries/vk_enum_string_helper.h +++ b/external/Vulkan-Utility-Libraries/vk_enum_string_helper.h @@ -726,6 +726,14 @@ static inline const char* string_VkStructureType(VkStructureType input_value) { return "VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR"; case VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD: return "VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD"; + case VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR: + return "VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR"; + case VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT: + return "VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT"; + case VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD: + return "VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD"; + case VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX: + return "VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX"; case VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP: return "VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV: @@ -808,8 +816,6 @@ static inline const char* string_VkStructureType(VkStructureType input_value) { return "VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX"; - case 
VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX: - return "VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX"; case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV: return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT: @@ -910,8 +916,6 @@ static inline const char* string_VkStructureType(VkStructureType input_value) { case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX: return "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX"; #endif // VK_ENABLE_BETA_EXTENSIONS - case VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD: - return "VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD"; case VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT: return "VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT"; case VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT: @@ -1088,10 +1092,6 @@ static inline const char* string_VkStructureType(VkStructureType input_value) { return "VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV"; case VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV: return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV"; - case VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV: - return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV"; - case VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV: - return "VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL"; case VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL: @@ -1122,8 +1122,6 @@ static inline const char* string_VkStructureType(VkStructureType input_value) { return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT"; case VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT: return "VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT"; - case VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT: - return "VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT"; case VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR: return "VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR"; case VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR: @@ -1134,8 +1132,6 @@ static inline const char* string_VkStructureType(VkStructureType input_value) { return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR"; - case VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR: - return "VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD: @@ -1386,6 +1382,10 @@ static inline const char* string_VkStructureType(VkStructureType input_value) { return "VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT"; case VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT: return "VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT"; + case VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV: + return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV"; + case VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV: + return 
"VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT: @@ -1490,8 +1490,6 @@ static inline const char* string_VkStructureType(VkStructureType input_value) { return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT"; case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT: return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT"; - //case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_MODE_FIFO_LATEST_READY_FEATURES_EXT: - //return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_MODE_FIFO_LATEST_READY_FEATURES_EXT"; case VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA: return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA"; case VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA: @@ -3678,8 +3676,6 @@ static inline const char* string_VkPresentModeKHR(VkPresentModeKHR input_value) return "VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR"; case VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR: return "VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR"; - //case VK_PRESENT_MODE_FIFO_LATEST_READY_EXT: - //return "VK_PRESENT_MODE_FIFO_LATEST_READY_EXT"; default: return "Unhandled VkPresentModeKHR"; } @@ -6019,6 +6015,10 @@ static inline const char* string_VkPipelineCreateFlagBits(VkPipelineCreateFlagBi return "VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT"; case VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT: return "VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT"; + case VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR: + return "VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR"; + case VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT: + return "VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT"; case VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR: return "VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR"; case VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR: @@ -6035,10 +6035,6 @@ static inline const char* string_VkPipelineCreateFlagBits(VkPipelineCreateFlagBi return "VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR"; case VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV: return "VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV"; - case VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT: - return "VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT"; - case VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR: - return "VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR"; case VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR: return "VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR"; case VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR: @@ -8429,9 +8425,6 @@ static inline const char* string_VkPipelineCreateFlagBits2KHR(uint64_t input_val if (input_value == VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR) return "VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR"; if (input_value == VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR) return "VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR"; if (input_value == VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR) return "VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR"; -#ifdef VK_ENABLE_BETA_EXTENSIONS - //if (input_value == 
VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX) return "VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX"; -#endif // VK_ENABLE_BETA_EXTENSIONS if (input_value == VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT) return "VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT"; if (input_value == VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR) return "VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR"; if (input_value == VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR) return "VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR"; @@ -8492,9 +8485,7 @@ static inline const char* string_VkBufferUsageFlagBits2KHR(uint64_t input_value) if (input_value == VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT_KHR) return "VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT_KHR"; if (input_value == VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT_KHR) return "VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT_KHR"; if (input_value == VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT_KHR) return "VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT_KHR"; -#ifdef VK_ENABLE_BETA_EXTENSIONS if (input_value == VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX) return "VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX"; -#endif // VK_ENABLE_BETA_EXTENSIONS if (input_value == VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_EXT) return "VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_EXT"; if (input_value == VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR) return "VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR"; if (input_value == VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT) return "VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT"; diff --git a/test/test_apps/CMakeLists.txt b/test/test_apps/CMakeLists.txt index 6425db96a0..2241ef3611 100644 --- a/test/test_apps/CMakeLists.txt +++ b/test/test_apps/CMakeLists.txt @@ -29,3 +29,4 @@ add_custom_target(gfxrecon-testapps) add_subdirectory(triangle) +add_subdirectory(multisample-depth) diff --git a/test/test_apps/common/CurrentBuildVulkanVersion.cmake b/test/test_apps/common/CurrentBuildVulkanVersion.cmake new file mode 100644 index 0000000000..1571568d94 --- /dev/null +++ b/test/test_apps/common/CurrentBuildVulkanVersion.cmake @@ -0,0 +1,2 @@ +set(VK_BOOTSTRAP_SOURCE_HEADER_VERSION 1.3.296) +set(VK_BOOTSTRAP_SOURCE_HEADER_VERSION_GIT_TAG v1.3.296) diff --git a/test/test_apps/common/generate_dispatch.py b/test/test_apps/common/generate_dispatch.py new file mode 100644 index 0000000000..19b807d1af --- /dev/null +++ b/test/test_apps/common/generate_dispatch.py @@ -0,0 +1,422 @@ + +# +# generate_dispatch.py +# +# Copyright © 2021 Cody Goodson (contact@vibimanx.com) +# Copyright © 2022 Charles Giessen (charles@lunarg.com) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated +# documentation files (the “Software”), to deal in the Software without restriction, including without +# limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +# of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +# LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +# This file is a part of VkBootstrap +# https://github.com/charles-lunarg/vk-bootstrap + +# On run, vk.xml is pulled from the master of Khronos's Vulkan-Headers repo and a VkBoostrapDispatch header +# is generated and placed in VkBoostrap's source directory +# https://raw.githubusercontent.com/KhronosGroup/Vulkan-Headers/master/registry/vk.xml + +# This script makes use of xmltodict +# https://github.com/martinblech/xmltodict +# User will be prompted to install if not detected + +# Command Line Arguments +# [--auto] Don't ask for input from the command line + + +import sys +import os +import subprocess +import copy +import codecs +import re +from string import Template +import urllib.request +import pkg_resources + +# Exclusions +exclusions = [ + 'vkGetDeviceProcAddr', + 'vkCreateDevice', + 'vkDestroyDevice' +] + +# Excluded extension authors - don't generate anything for these types of extensions +excluded_extension_authors = [ + 'NVX' +] + +excluded_alias_types = [ + 'VkPipelineInfoKHR' +] + +# Check for/install xmltodict +installed = {pkg.key for pkg in pkg_resources.working_set} +xmltodict_missing = {'xmltodict'} - installed + +# Install xmltodict +if xmltodict_missing: + if '--auto' not in sys.argv: + val = input('xmltodict is required to run this script. Would you like to install? (y/n): ') + else: + val = 'y' + if val.lower() == 'y': + try: + subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'xmltodict']) + except subprocess.CalledProcessError as error: + print('Failed to install xmltodict due to error:') + print(error) + if '--auto' not in sys.argv: + input('Press Enter to continue...') + sys.exit() + else: + sys.exit() + +# Fetch fresh vk.xml from Khronos repo +import xmltodict + +try: + response = urllib.request.urlopen('https://raw.githubusercontent.com/KhronosGroup/Vulkan-Headers/refs/heads/vulkan-sdk-1.3.296/registry/vk.xml') +except urllib.error.URLError as error: + print('Failed to download vk.xml due to error:') + print(error.reason) + if '-q' not in sys.argv: + input('Press Enter to continue...') + sys.exit() +vk_xml_raw = response.read() + +vk_xml = xmltodict.parse(vk_xml_raw,process_namespaces=True) + +command_params = {'return_type': '', 'args': [], 'dispatch_type': '', 'requirements': [], 'macro_template': Template('')} + +commands = {} + +INSTANCE = 'instance' +DEVICE = 'device' + +# No good way to detect incompatibilities with the macro defines and the actual functions. 
Just keep a list here +HEADER_VERSION_WORKAROUNDS = { + 'vkGetLatencyTimingsNV': '271', # Changed API parameters + 'vkCmdSetDiscardRectangleEnableEXT': '241', # new function in older extension + 'vkCmdSetDiscardRectangleModeEXT': '241', # new function in older extension + 'vkCmdSetExclusiveScissorEnableNV': '241', # new function in older extension + 'vkCmdInitializeGraphScratchMemoryAMDX': '298', # Changed API parameters + 'vkCmdDispatchGraphAMDX': '298', # Changed API parameters + 'vkCmdDispatchGraphIndirectAMDX': '298', # Changed API parameters + 'vkCmdDispatchGraphIndirectCountAMDX': '298', # Changed API parameters +} + +def get_macro_guard(reqs_collection, command_name): + guard = '' + count = len(reqs_collection) + if count > 0: + while count > 0: + for reqs in reqs_collection: + reqs_count = len(reqs) + guard += '(' + for req in reqs: + guard += f'defined({req})' + reqs_count -= 1 + if reqs_count > 0: + guard += ' && ' + guard += ')' + if count > 0: + count -= 1 + if count > 0: + guard += ' || ' + # API breaking change causes this function to fail compilation + for function, version in HEADER_VERSION_WORKAROUNDS.items(): + if command_name == function: + guard = f'({guard}) && VK_HEADER_VERSION >= {version}' + return guard + + +aliased_types = {} +types_node = vk_xml['registry']['types']['type'] +for type_node in types_node: + if '@alias' in type_node: + aliased_types[type_node['@alias']] = type_node['@name'] + +# Gather all device functions/aliases for filtering core/extension function fetching +commands_node = vk_xml['registry']['commands']['command'] +aliases = {} +for command_node in commands_node: + if 'proto' in command_node: + command_name = command_node['proto']['name'] + new_command_params = copy.deepcopy(command_params) + new_command_params['return_type'] = command_node['proto']['type'] + if isinstance(command_node['param'], list): + params = command_node['param'] + else: + params = [command_node['param']] + new_command_params['args'] = [] + for param in params: + # if the api attribute does exist, make sure it is for vulkan + if not '@api' in param or param['@api'] == 'vulkan': + new_command_params['args'].append(param) + if not command_name in exclusions: + commands[command_name] = new_command_params + commands[command_name]['is_alias'] = False + if new_command_params['args'][0]['type'] in ['VkDevice', 'VkCommandBuffer', 'VkQueue']: + commands[command_name]['dispatch_type'] = DEVICE + elif new_command_params['args'][0]['type'] in ['VkInstance', 'VkPhysicalDevice']: + commands[command_name]['dispatch_type'] = INSTANCE + elif '@alias' in command_node: + aliases[command_node['@alias']] = command_node['@name'] + +# Push the alias name as a device function if the alias exists in device commands +for aliased_type, alias in aliases.items(): + if aliased_type in commands: + commands[alias] = copy.deepcopy(commands[aliased_type]) + commands[alias]['is_alias'] = True + +# Add requirements for core PFN's +features_node = vk_xml['registry']['feature'] +for feature_node in features_node: + if feature_node['@name'] != 'VK_VERSION_1_0': + for require_node in feature_node['require']: + for param_node in require_node: + if param_node == 'command': + if not isinstance(require_node[param_node], list): + require_node[param_node] = [require_node[param_node]] + for param in require_node[param_node]: + if param['@name'] in commands: + commands[param['@name']]['requirements'] += [[feature_node['@name']]] + + +# Add requirements for extension PFN's +extensions_node = 
vk_xml['registry']['extensions']['extension'] +for extension_node in extensions_node: + extension_name = extension_node['@name'] + if 'require' in extension_node.keys(): + require_nodes = extension_node['require'] + for require_node in require_nodes: + requirements = [extension_name] + if not isinstance(require_node, str): + if 'command' in require_node.keys(): + if '@feature' in require_node.keys(): + requirements.append(require_node['@feature']) + if '@extension' in require_node.keys(): + requirements.extend(require_node['@extension'].split(',')) + if not isinstance(require_node['command'], list): + require_node['command'] = [require_node['command']] + for command_node in require_node['command']: + if command_node['@name'] in commands: + if '@author' in extension_node and extension_node['@author'] in excluded_extension_authors: + commands.pop(command_node['@name']) + else: + commands[command_node['@name']]['requirements'] += [requirements] + elif require_node == 'command': + if not isinstance(require_nodes['command'], list): + require_nodes['command'] = [require_nodes['command']] + for command_node in require_nodes['command']: + if command_node['@name'] in commands: + if '@author' in extension_node and extension_node['@author'] in excluded_extension_authors: + commands.pop(command_node['@name']) + else: + commands[command_node['@name']]['requirements'] += [requirements] + +# Generate macro templates +for command_name, command in commands.items(): + if len(commands[command_name]['requirements']) > 0: + macro_guard = get_macro_guard(commands[command_name]['requirements'], command_name) + macro = f'#if {macro_guard}\n$body#endif\n' + else: + macro = '$body' + commands[command_name]['macro_template'] = Template(macro) + +for command_name, command in commands.items(): + if len(commands[command_name]['requirements']) > 0: + macro_guard = get_macro_guard(commands[command_name]['requirements'], command_name) + pfn_decl_macro = f'#if {macro_guard}\n$body#else\n void * fp_{command_name}{{}};\n#endif\n' + else: + pfn_decl_macro = '$body' + commands[command_name]['pfn_decl_macro_template'] = Template(pfn_decl_macro) + +# License +dispatch_license = '''/* + * Copyright © 2021 Cody Goodson (contact@vibimanx.com) + * Copyright © 2022 Charles Giessen (charles@lunarg.com) + * + * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated + * documentation files (the “Software”), to deal in the Software without restriction, including without + * limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies + * of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT + * LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ * + */ +''' + +# Info +info = '// This file is a part of VkBootstrap\n' +info += '// https://github.com/charles-lunarg/vk-bootstrap\n\n' + +# # Content +head = '\n#pragma once\n\n#include \n\n' +head += '' +head += 'namespace vkb {\n\n' + +def create_dispatch_table(dispatch_type): + out = '' + if dispatch_type == INSTANCE: + out += 'struct InstanceDispatchTable {\n' + out += ' InstanceDispatchTable() = default;\n' + out += ' InstanceDispatchTable(VkInstance instance, PFN_vkGetInstanceProcAddr procAddr) : instance(instance), populated(true) {\n' + else: + out += 'struct DispatchTable {\n' + out += ' DispatchTable() = default;\n' + out += ' DispatchTable(VkDevice device, PFN_vkGetDeviceProcAddr procAddr) : device(device), populated(true) {\n' + + proxy_section = '' + fp_decl_section = '' + pfn_load_section = '' + + proxy_template = Template(' $return_type $proxy_name($args_full) const noexcept {\n $opt_return$fp_name($args_names);\n }\n') + fp_decl_template = Template(' $pfn_name $fp_name = nullptr;\n') + if dispatch_type == INSTANCE: + pfn_load_template = Template(' $fp_name = reinterpret_cast<$pfn_name>(procAddr(instance, "$command_name"));\n') + else: + pfn_load_template = Template(' $fp_name = reinterpret_cast<$pfn_name>(procAddr(device, "$command_name"));\n') + + for command_name, command in commands.items(): + if command['dispatch_type'] != dispatch_type: + continue + params = commands[command_name] + # easy stuff out of the way + return_type = params['return_type'] + if return_type != 'void': + opt_return = 'return ' + else: + opt_return = '' + proxy_name = command_name[2].lower() + command_name[3:] + fp_name = 'fp_' + command_name + pfn_name = 'PFN_' + command_name + + # Now for args + arg_template = Template('$front_mods$arg_type$back_mods$arg_name$array') + args_full = '' + args_names = '' + args_count = len(params['args']) + i = args_count + for arg in params['args']: + front_mods = '' + back_mods = ' ' + array = '' + arg_type = arg['type'] + arg_name = arg['name'] + if '#text' in arg: + text = arg['#text'] + text = text.replace(' ', '') + array_index = text.find('[') + if array_index != -1: + array = text[array_index:] + text = text[0:array_index] + if text == '*': + front_mods = '' + back_mods = '* ' + elif text == '**': + front_mods = '' + back_mods = '** ' + elif text == 'struct*': + front_mods = 'struct ' + back_mods = '* ' + elif text == 'struct**': + front_mods = 'struct ' + back_mods = '** ' + elif text == 'const': + front_mods = 'const ' + back_mods = ' ' + elif text == 'const*': + front_mods = 'const ' + back_mods = '* ' + elif text == 'const**': + front_mods = 'const ' + back_mods = '** ' + elif text == 'const*const*': + front_mods = 'const ' + back_mods = '* const* ' + elif text == 'conststruct*': + front_mods = 'const struct ' + back_mods = '* ' + else: + print("Unhandled Text Case!") + assert(False) + if i == args_count and (dispatch_type == INSTANCE and arg_type == 'VkInstance') or (dispatch_type == DEVICE and arg_type == 'VkDevice'): + args_names += arg_name + if i > 0: + i -= 1 + if i > 0: + args_names += ', ' + else: + if arg_type in aliased_types and arg_type not in excluded_alias_types: + arg_type = aliased_types[arg_type] + args_full += arg_template.substitute(front_mods = front_mods, arg_type = arg_type, back_mods = back_mods, arg_name = arg_name, array = array) + args_names += arg_name + if i > 0: + i -= 1 + if i > 0: + args_full += ', ' + args_names += ', ' + + proxy_body = proxy_template.substitute(return_type = return_type, proxy_name = proxy_name, 
args_full = args_full, opt_return = opt_return, fp_name = fp_name, args_names = args_names) + fp_decl_body = fp_decl_template.substitute(pfn_name = pfn_name, fp_name = fp_name) + pfn_load_body = pfn_load_template.substitute(fp_name = fp_name, pfn_name = pfn_name, command_name = command_name) + + macro_template = params['macro_template'] + pfn_decl_macro_template = params['pfn_decl_macro_template'] + proxy_section += macro_template.substitute(body=proxy_body) + fp_decl_section += pfn_decl_macro_template.substitute(body=fp_decl_body) + pfn_load_section += macro_template.substitute(body=pfn_load_body) + + out += pfn_load_section + out += ' }\n' + out += proxy_section + out += fp_decl_section + out += ' bool is_populated() const { return populated; }\n' + if dispatch_type == INSTANCE: + out += ' VkInstance instance = VK_NULL_HANDLE;\n' + else: + out += ' VkDevice device = VK_NULL_HANDLE;\n' + out += 'private:\n' + out += ' bool populated = false;\n' + out += '};\n\n' + return out + +tail = '} // namespace vkb' + +# find the version used to generate the code +for type_node in types_node: + if '@api' in type_node and type_node['@api'] == 'vulkan' and 'name' in type_node and type_node['name'] == 'VK_HEADER_VERSION_COMPLETE': + complete_header_version = type_node['#text'] + if '@api' in type_node and type_node['@api'] == 'vulkan' and 'name' in type_node and type_node['name'] == 'VK_HEADER_VERSION': + vk_header_version = type_node['#text'] +find_number_fields = re.compile('[0-9]+') +version_fields = find_number_fields.findall(complete_header_version) +header_version_field = find_number_fields.findall(vk_header_version)[0] +version_tag = f'{version_fields[1]}.{version_fields[2]}.{header_version_field}' + +header_file = codecs.open(os.path.join('test_app_dispatch.h'), 'w', 'utf-8') +header_file.write(dispatch_license + info + head + create_dispatch_table('instance') + create_dispatch_table('device') + tail) +header_file.close() + +# Generate a CMake file that contains the header version used. 
+cmake_version_file = codecs.open(os.path.join('CurrentBuildVulkanVersion.cmake'), 'w', 'utf-8') +cmake_version_file.write(f'set(VK_BOOTSTRAP_SOURCE_HEADER_VERSION {version_tag})\n') +cmake_version_file.write(f'set(VK_BOOTSTRAP_SOURCE_HEADER_VERSION_GIT_TAG v{version_tag})\n') +cmake_version_file.close() + +print('Generation finished.') \ No newline at end of file diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index 796541a3d1..e45ae47405 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -593,7 +593,7 @@ void destroy_instance(Instance const& instance) { Instance::operator VkInstance() const { return this->instance; } -InstanceDispatchTable Instance::make_table() const { return { instance, fp_vkGetInstanceProcAddr }; } +vkb::InstanceDispatchTable Instance::make_table() const { return { instance, fp_vkGetInstanceProcAddr }; } InstanceBuilder::InstanceBuilder(PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr) { info.fp_vkGetInstanceProcAddr = fp_vkGetInstanceProcAddr; @@ -1635,7 +1635,7 @@ std::optional Device::get_dedicated_queue(QueueType type) const { // ---- Dispatch ---- // -DispatchTable Device::make_table() const { return { device, fp_vkGetDeviceProcAddr }; } +vkb::DispatchTable Device::make_table() const { return { device, fp_vkGetDeviceProcAddr }; } // ---- Device ---- // @@ -2223,15 +2223,14 @@ VkSurfaceKHR create_surface_sdl(VkInstance instance, SDL_Window * window, VkAllo return surface; } -void create_swapchain(Device const& device, Swapchain& swapchain) { - SwapchainBuilder swapchain_builder{ device }; +void create_swapchain(SwapchainBuilder& swapchain_builder, Swapchain& swapchain) { auto new_swapchain = swapchain_builder.set_old_swapchain(swapchain).build(); destroy_swapchain(swapchain); swapchain = new_swapchain; } VkCommandPool create_command_pool( - DispatchTable const& disp, + vkb::DispatchTable const& disp, uint32_t queue_family_index ) { VkCommandPoolCreateInfo pool_info = {}; @@ -2244,7 +2243,7 @@ VkCommandPool create_command_pool( return command_pool; } -Sync create_sync_objects(Swapchain const& swapchain, DispatchTable const& disp, const int max_frames_in_flight) { +Sync create_sync_objects(Swapchain const& swapchain, vkb::DispatchTable const& disp, const int max_frames_in_flight) { Sync sync; sync.available_semaphores.resize(max_frames_in_flight); @@ -2289,7 +2288,7 @@ std::vector readFile(const std::string& filename) { return buffer; } -VkShaderModule createShaderModule(DispatchTable const& disp, const std::vector& code) { +VkShaderModule createShaderModule(vkb::DispatchTable const& disp, const std::vector& code) { VkShaderModuleCreateInfo create_info = {}; create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; create_info.codeSize = code.size(); @@ -2302,7 +2301,7 @@ VkShaderModule createShaderModule(DispatchTable const& disp, const std::vector code = readFile(filename); return createShaderModule(disp, code); } @@ -2321,7 +2320,7 @@ std::exception sdl_exception() { void device_initialization_phase_1(const std::string& window_name, Init& init) { - init.window = gfxrecon::test::create_window_sdl(window_name.data(), true, 1024, 1024); + init.window = create_window_sdl(window_name.data(), true, 1024, 1024); } void device_initialization_phase_2(InstanceBuilder const& instance_builder, Init& init) @@ -2330,7 +2329,7 @@ void device_initialization_phase_2(InstanceBuilder const& instance_builder, Init init.inst_disp = init.instance.make_table(); - init.surface = 
gfxrecon::test::create_surface_sdl(init.instance, init.window); + init.surface = create_surface_sdl(init.instance, init.window); } PhysicalDevice device_initialization_phase_3(PhysicalDeviceSelector& phys_device_selector, Init& init) @@ -2343,8 +2342,11 @@ void device_initialization_phase_4(DeviceBuilder const& device_builder, Init& in init.device = device_builder.build(); init.disp = init.device.make_table(); +} - gfxrecon::test::create_swapchain(init.device, init.swapchain); +void device_initialization_phase_5(SwapchainBuilder& swapchain_builder, Init& init) +{ + create_swapchain(swapchain_builder, init.swapchain); init.swapchain_images = init.swapchain.get_images(); init.swapchain_image_views = init.swapchain.get_image_views(); @@ -2355,34 +2357,37 @@ Init device_initialization(const std::string& window_name) { device_initialization_phase_1(window_name, init); - gfxrecon::test::InstanceBuilder instance_builder; + InstanceBuilder instance_builder; device_initialization_phase_2(instance_builder, init); - gfxrecon::test::PhysicalDeviceSelector phys_device_selector(init.instance); - auto physical_device = device_initialization_phase_3(phys_device_selector, init); + PhysicalDeviceSelector phys_device_selector(init.instance); + init.physical_device = device_initialization_phase_3(phys_device_selector, init); - gfxrecon::test::DeviceBuilder device_builder{ physical_device }; + DeviceBuilder device_builder{ init.physical_device }; device_initialization_phase_4(device_builder, init); + SwapchainBuilder swapchain_builder{ init.device }; + device_initialization_phase_5(swapchain_builder, init); + return init; } -void cleanup_init(gfxrecon::test::Init& init) { +void cleanup_init(Init& init) { init.swapchain.destroy_image_views(init.swapchain_image_views); - gfxrecon::test::destroy_swapchain(init.swapchain); - gfxrecon::test::destroy_device(init.device); - gfxrecon::test::destroy_surface(init.instance, init.surface); - gfxrecon::test::destroy_instance(init.instance); - gfxrecon::test::destroy_window_sdl(init.window); + destroy_swapchain(init.swapchain); + destroy_device(init.device); + destroy_surface(init.instance, init.surface); + destroy_instance(init.instance); + destroy_window_sdl(init.window); } -void recreate_swapchain(gfxrecon::test::Init& init, bool wait_for_idle) { +void recreate_init_swapchain(SwapchainBuilder& swapchain_builder, Init& init, bool wait_for_idle) { if (wait_for_idle) init.disp.deviceWaitIdle(); init.swapchain.destroy_image_views(init.swapchain_image_views); - gfxrecon::test::create_swapchain(init.device, init.swapchain); + create_swapchain(swapchain_builder, init.swapchain); init.swapchain_images = init.swapchain.get_images(); init.swapchain_image_views = init.swapchain.get_image_views(); @@ -2392,18 +2397,22 @@ void TestAppBase::run(const std::string& window_name) { device_initialization_phase_1(window_name, this->init); - gfxrecon::test::InstanceBuilder instance_builder; + InstanceBuilder instance_builder; this->configure_instance_builder(instance_builder); device_initialization_phase_2(instance_builder, this->init); - gfxrecon::test::PhysicalDeviceSelector phys_device_selector(this->init.instance); + PhysicalDeviceSelector phys_device_selector(this->init.instance); this->configure_physical_device_selector(phys_device_selector); - auto physical_device = device_initialization_phase_3(phys_device_selector, this->init); + init.physical_device = device_initialization_phase_3(phys_device_selector, this->init); - gfxrecon::test::DeviceBuilder device_builder{ physical_device }; 
- this->configure_device_builder(device_builder, physical_device); + DeviceBuilder device_builder{ init.physical_device }; + this->configure_device_builder(device_builder, init.physical_device); device_initialization_phase_4(device_builder, this->init); + SwapchainBuilder swapchain_builder{ init.device }; + this->configure_swapchain_builder(swapchain_builder); + device_initialization_phase_5(swapchain_builder, this->init); + this->setup(); bool running = true; @@ -2427,6 +2436,13 @@ void TestAppBase::run(const std::string& window_name) cleanup_init(this->init); } +void TestAppBase::recreate_swapchain(bool wait_for_idle) +{ + SwapchainBuilder swapchain_builder{ init.device }; + this->configure_swapchain_builder(swapchain_builder); + recreate_init_swapchain(swapchain_builder, init, wait_for_idle); +} + void TestAppBase::setup() {} void TestAppBase::cleanup() {} void TestAppBase::configure_instance_builder(InstanceBuilder& instance_builder) { @@ -2434,6 +2450,7 @@ void TestAppBase::configure_instance_builder(InstanceBuilder& instance_builder) } void TestAppBase::configure_physical_device_selector(PhysicalDeviceSelector& phys_device_selector) {} void TestAppBase::configure_device_builder(DeviceBuilder& device_builder, PhysicalDevice const& physical_device) {} +void TestAppBase::configure_swapchain_builder(SwapchainBuilder& swapchain_builder) {} GFXRECON_END_NAMESPACE(test) diff --git a/test/test_apps/common/test_app_base.h b/test/test_apps/common/test_app_base.h index 0659632767..02048420f3 100644 --- a/test/test_apps/common/test_app_base.h +++ b/test/test_apps/common/test_app_base.h @@ -231,7 +231,7 @@ struct Instance { operator VkInstance() const; // Return a loaded instance dispatch table - InstanceDispatchTable make_table() const; + vkb::InstanceDispatchTable make_table() const; private: bool headless = false; @@ -678,7 +678,7 @@ struct Device { std::optional get_dedicated_queue(QueueType type) const; // Return a loaded dispatch table - DispatchTable make_table() const; + vkb::DispatchTable make_table() const; // A conversion function which allows this Device to be used // in places where VkDevice would have been used. 
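As a rough sketch of how the TestAppBase hooks introduced by these patches are meant to be consumed, a minimal test app could look like the following. This is illustrative only: the app name "minimal", the include names, and the namespace layout are assumptions modelled on the triangle app above, and only frame() must be overridden since the other virtuals have default implementations.

#include <iostream>
#include <test_app_base.h>

GFXRECON_BEGIN_NAMESPACE(gfxrecon)
GFXRECON_BEGIN_NAMESPACE(test_app)
GFXRECON_BEGIN_NAMESPACE(minimal)

class Minimal : public gfxrecon::test::TestAppBase
{
  protected:
    // Hypothetical override: keep the default debug messenger but skip the
    // validation layers requested by the base configure_instance_builder().
    void configure_instance_builder(gfxrecon::test::InstanceBuilder& instance_builder) override
    {
        instance_builder.use_default_debug_messenger();
    }

    // The physical-device, device, and swapchain hooks are left at their defaults.

    // frame() is the only pure virtual: draw nothing and stop after ten frames.
    bool frame(const int frame_num) override { return frame_num >= 10; }
};

GFXRECON_END_NAMESPACE(minimal)
GFXRECON_END_NAMESPACE(test_app)
GFXRECON_END_NAMESPACE(gfxrecon)

int main(int, char*[])
{
    try
    {
        gfxrecon::test_app::minimal::Minimal app{};
        app.run("minimal");
        return 0;
    }
    catch (const std::exception& e)
    {
        std::cout << e.what() << std::endl;
        return -1;
    }
}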
@@ -912,9 +912,9 @@ class SwapchainBuilder { SDL_Window* create_window_sdl(const char* window_name, bool resizable, int width, int height); void destroy_window_sdl(SDL_Window * window); VkSurfaceKHR create_surface_sdl(VkInstance instance, SDL_Window * window, VkAllocationCallbacks* allocator = nullptr); -void create_swapchain(Device const&, Swapchain& swapchain); +void create_swapchain(SwapchainBuilder& swapchain_builder, Swapchain& swapchain); -VkCommandPool create_command_pool(DispatchTable const& disp, uint32_t queue_family_index); +VkCommandPool create_command_pool(vkb::DispatchTable const& disp, uint32_t queue_family_index); struct Sync { std::vector available_semaphores; @@ -932,23 +932,24 @@ struct Sync { Sync& operator =(Sync&&) = default; }; -Sync create_sync_objects(Swapchain const& swapchain, DispatchTable const& disp, const int max_frames_in_flight); +Sync create_sync_objects(Swapchain const& swapchain, vkb::DispatchTable const& disp, const int max_frames_in_flight); std::vector readFile(const std::string& filename); -VkShaderModule createShaderModule(DispatchTable const& disp, const std::vector& code); +VkShaderModule createShaderModule(vkb::DispatchTable const& disp, const std::vector& code); -VkShaderModule readShaderFromFile(DispatchTable const& disp, const std::string& filename); +VkShaderModule readShaderFromFile(vkb::DispatchTable const& disp, const std::string& filename); #define VERIFY_VK_RESULT(message, result) { if (result != VK_SUCCESS) throw gfxrecon::test::vulkan_exception(message, result); } struct Init { SDL_Window* window; Instance instance; - InstanceDispatchTable inst_disp; + vkb::InstanceDispatchTable inst_disp; VkSurfaceKHR surface; + PhysicalDevice physical_device; Device device; - DispatchTable disp; + vkb::DispatchTable disp; Swapchain swapchain; std::vector swapchain_images; std::vector swapchain_image_views; @@ -958,7 +959,7 @@ Init device_initialization(const std::string& window_name); void cleanup_init(Init& init); -void recreate_swapchain(Init& init, bool wait_for_idle = true); +void recreate_init_swapchain(Init& init, bool wait_for_idle = true); class TestAppBase { public: @@ -971,12 +972,15 @@ class TestAppBase { TestAppBase(TestAppBase&&) = delete; TestAppBase& operator=(TestAppBase&&) = delete; + void recreate_swapchain(bool wait_for_idle); + virtual void setup(); virtual bool frame(const int frame_num) = 0; virtual void cleanup(); virtual void configure_instance_builder(InstanceBuilder& instance_builder); virtual void configure_physical_device_selector(PhysicalDeviceSelector& phys_device_selector); virtual void configure_device_builder(DeviceBuilder& device_builder, PhysicalDevice const& physical_device); + virtual void configure_swapchain_builder(SwapchainBuilder& swapchain_builder); Init init; }; diff --git a/test/test_apps/common/test_app_dispatch.h b/test/test_apps/common/test_app_dispatch.h index f18fdea99a..d14588a614 100644 --- a/test/test_apps/common/test_app_dispatch.h +++ b/test/test_apps/common/test_app_dispatch.h @@ -1,36 +1,29 @@ /* -** Copyright (c) 2018-2023 Valve Corporation -** Copyright (c) 2018-2024 LunarG, Inc. 
-** -** Permission is hereby granted, free of charge, to any person obtaining a -** copy of this software and associated documentation files (the "Software"), -** to deal in the Software without restriction, including without limitation -** the rights to use, copy, modify, merge, publish, distribute, sublicense, -** and/or sell copies of the Software, and to permit persons to whom the -** Software is furnished to do so, subject to the following conditions: -** -** The above copyright notice and this permission notice shall be included in -** all copies or substantial portions of the Software. -** -** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -** IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -** AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -** FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -** DEALINGS IN THE SOFTWARE. -*/ + * Copyright © 2021 Cody Goodson (contact@vibimanx.com) + * Copyright © 2022 Charles Giessen (charles@lunarg.com) + * + * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated + * documentation files (the “Software”), to deal in the Software without restriction, including without + * limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies + * of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT + * LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ * + */ +// This file is a part of VkBootstrap +// https://github.com/charles-lunarg/vk-bootstrap -#ifndef GFXRECON_TEST_APP_DISPATCH_H -#define GFXRECON_TEST_APP_DISPATCH_H -#include - -#include "util/defines.h" +#pragma once -GFXRECON_BEGIN_NAMESPACE(gfxrecon) +#include -GFXRECON_BEGIN_NAMESPACE(test) +namespace vkb { struct InstanceDispatchTable { InstanceDispatchTable() = default; @@ -1346,13 +1339,14 @@ struct InstanceDispatchTable { #endif bool is_populated() const { return populated; } VkInstance instance = VK_NULL_HANDLE; - private: - bool populated = false; +private: + bool populated = false; }; struct DispatchTable { DispatchTable() = default; DispatchTable(VkDevice device, PFN_vkGetDeviceProcAddr procAddr) : device(device), populated(true) { + fp_vkGetDeviceProcAddr = procAddr; fp_vkGetDeviceQueue = reinterpret_cast(procAddr(device, "vkGetDeviceQueue")); fp_vkQueueSubmit = reinterpret_cast(procAddr(device, "vkQueueSubmit")); fp_vkQueueWaitIdle = reinterpret_cast(procAddr(device, "vkQueueWaitIdle")); @@ -2630,16 +2624,16 @@ struct DispatchTable { #if (defined(VK_AMDX_shader_enqueue)) fp_vkCreateExecutionGraphPipelinesAMDX = reinterpret_cast(procAddr(device, "vkCreateExecutionGraphPipelinesAMDX")); #endif -#if (defined(VK_AMDX_shader_enqueue)) +#if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 fp_vkCmdInitializeGraphScratchMemoryAMDX = reinterpret_cast(procAddr(device, "vkCmdInitializeGraphScratchMemoryAMDX")); #endif -#if (defined(VK_AMDX_shader_enqueue)) +#if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 fp_vkCmdDispatchGraphAMDX = reinterpret_cast(procAddr(device, "vkCmdDispatchGraphAMDX")); #endif -#if (defined(VK_AMDX_shader_enqueue)) +#if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 fp_vkCmdDispatchGraphIndirectAMDX = reinterpret_cast(procAddr(device, "vkCmdDispatchGraphIndirectAMDX")); #endif -#if (defined(VK_AMDX_shader_enqueue)) +#if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 fp_vkCmdDispatchGraphIndirectCountAMDX = reinterpret_cast(procAddr(device, "vkCmdDispatchGraphIndirectCountAMDX")); #endif #if (defined(VK_KHR_maintenance6)) @@ -5176,24 +5170,24 @@ struct DispatchTable { return fp_vkCreateExecutionGraphPipelinesAMDX(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); } #endif -#if (defined(VK_AMDX_shader_enqueue)) - void cmdInitializeGraphScratchMemoryAMDX(VkCommandBuffer commandBuffer, VkPipeline executionGraph, VkDeviceAddress scratch, VkDeviceSize scratchSize) const noexcept { - fp_vkCmdInitializeGraphScratchMemoryAMDX(commandBuffer, executionGraph, scratch, scratchSize); +#if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 + void cmdInitializeGraphScratchMemoryAMDX(VkCommandBuffer commandBuffer, VkDeviceAddress scratch) const noexcept { + fp_vkCmdInitializeGraphScratchMemoryAMDX(commandBuffer, scratch); } #endif -#if (defined(VK_AMDX_shader_enqueue)) - void cmdDispatchGraphAMDX(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceSize scratchSize, const VkDispatchGraphCountInfoAMDX* pCountInfo) const noexcept { - fp_vkCmdDispatchGraphAMDX(commandBuffer, scratch, scratchSize, pCountInfo); +#if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 + void cmdDispatchGraphAMDX(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, const VkDispatchGraphCountInfoAMDX* pCountInfo) const noexcept { + fp_vkCmdDispatchGraphAMDX(commandBuffer, scratch, pCountInfo); } #endif -#if (defined(VK_AMDX_shader_enqueue)) - void 
cmdDispatchGraphIndirectAMDX(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceSize scratchSize, const VkDispatchGraphCountInfoAMDX* pCountInfo) const noexcept { - fp_vkCmdDispatchGraphIndirectAMDX(commandBuffer, scratch, scratchSize, pCountInfo); +#if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 + void cmdDispatchGraphIndirectAMDX(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, const VkDispatchGraphCountInfoAMDX* pCountInfo) const noexcept { + fp_vkCmdDispatchGraphIndirectAMDX(commandBuffer, scratch, pCountInfo); } #endif -#if (defined(VK_AMDX_shader_enqueue)) - void cmdDispatchGraphIndirectCountAMDX(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceSize scratchSize, VkDeviceAddress countInfo) const noexcept { - fp_vkCmdDispatchGraphIndirectCountAMDX(commandBuffer, scratch, scratchSize, countInfo); +#if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 + void cmdDispatchGraphIndirectCountAMDX(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceAddress countInfo) const noexcept { + fp_vkCmdDispatchGraphIndirectCountAMDX(commandBuffer, scratch, countInfo); } #endif #if (defined(VK_KHR_maintenance6)) @@ -7655,22 +7649,22 @@ struct DispatchTable { #else void * fp_vkCreateExecutionGraphPipelinesAMDX{}; #endif -#if (defined(VK_AMDX_shader_enqueue)) +#if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 PFN_vkCmdInitializeGraphScratchMemoryAMDX fp_vkCmdInitializeGraphScratchMemoryAMDX = nullptr; #else void * fp_vkCmdInitializeGraphScratchMemoryAMDX{}; #endif -#if (defined(VK_AMDX_shader_enqueue)) +#if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 PFN_vkCmdDispatchGraphAMDX fp_vkCmdDispatchGraphAMDX = nullptr; #else void * fp_vkCmdDispatchGraphAMDX{}; #endif -#if (defined(VK_AMDX_shader_enqueue)) +#if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 PFN_vkCmdDispatchGraphIndirectAMDX fp_vkCmdDispatchGraphIndirectAMDX = nullptr; #else void * fp_vkCmdDispatchGraphIndirectAMDX{}; #endif -#if (defined(VK_AMDX_shader_enqueue)) +#if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 PFN_vkCmdDispatchGraphIndirectCountAMDX fp_vkCmdDispatchGraphIndirectCountAMDX = nullptr; #else void * fp_vkCmdDispatchGraphIndirectCountAMDX{}; @@ -8087,12 +8081,9 @@ struct DispatchTable { #endif bool is_populated() const { return populated; } VkDevice device = VK_NULL_HANDLE; - private: - bool populated = false; + PFN_vkGetDeviceProcAddr fp_vkGetDeviceProcAddr; +private: + bool populated = false; }; -GFXRECON_END_NAMESPACE(gfxrecon) - -GFXRECON_END_NAMESPACE(test) - -#endif // GFXRECON_TEST_APP_DISPATCH_H +} // namespace vkb \ No newline at end of file diff --git a/test/test_apps/multisample-depth/CMakeLists.txt b/test/test_apps/multisample-depth/CMakeLists.txt new file mode 100644 index 0000000000..526a9a5e51 --- /dev/null +++ b/test/test_apps/multisample-depth/CMakeLists.txt @@ -0,0 +1,69 @@ +############################################################################### +# Copyright (c) 2018-2020 LunarG, Inc. +# Copyright (c) 2020-2023 Advanced Micro Devices, Inc. 
+# All rights reserved +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# Author: LunarG Team +# Author: AMD Developer Tools Team +# Description: CMake script for multisample depth test app +############################################################################### + +add_executable(gfxrecon-testapp-multisample-depth "") + +target_sources(gfxrecon-testapp-multisample-depth + PRIVATE + ${CMAKE_CURRENT_LIST_DIR}/app.cpp + ${CMAKE_CURRENT_LIST_DIR}/../common/test_app_base.cpp) + +target_include_directories(gfxrecon-testapp-multisample-depth PUBLIC + ${CMAKE_BINARY_DIR} + ${CMAKE_CURRENT_LIST_DIR}/../common) + +target_link_libraries(gfxrecon-testapp-multisample-depth + vulkan_memory_allocator + gfxrecon_application + gfxrecon_util + SDL3::SDL3 + platform_specific) + +if (MSVC) + # Force inclusion of "gfxrecon_disable_popup_result" variable in linking. + # On 32-bit windows, MSVC prefixes symbols with "_" but on 64-bit windows it doesn't. + if(CMAKE_SIZEOF_VOID_P EQUAL 4) + target_link_options(gfxrecon-replay PUBLIC "LINKER:/Include:_gfxrecon_disable_popup_result") + else() + target_link_options(gfxrecon-replay PUBLIC "LINKER:/Include:gfxrecon_disable_popup_result") + endif() +endif() + +common_build_directives(gfxrecon-testapp-multisample-depth) + +add_custom_command( + TARGET gfxrecon-testapp-multisample-depth + POST_BUILD + COMMAND + ${CMAKE_COMMAND} -E copy_directory + ${CMAKE_CURRENT_LIST_DIR}/shaders "$" + DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_BUILD_TYPE}) + +install(TARGETS gfxrecon-testapp-multisample-depth RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}) + +add_dependencies(gfxrecon-testapps gfxrecon-testapp-multisample-depth) \ No newline at end of file diff --git a/test/test_apps/multisample-depth/app.cpp b/test/test_apps/multisample-depth/app.cpp new file mode 100644 index 0000000000..56cf47f5a2 --- /dev/null +++ b/test/test_apps/multisample-depth/app.cpp @@ -0,0 +1,665 @@ +/* +** Copyright (c) 2018-2023 Valve Corporation +** Copyright (c) 2018-2024 LunarG, Inc. 
+** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and associated documentation files (the "Software"), +** to deal in the Software without restriction, including without limitation +** the rights to use, copy, modify, merge, publish, distribute, sublicense, +** and/or sell copies of the Software, and to permit persons to whom the +** Software is furnished to do so, subject to the following conditions: +** +** The above copyright notice and this permission notice shall be included in +** all copies or substantial portions of the Software. +** +** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +** IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +** AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +** FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +** DEALINGS IN THE SOFTWARE. +*/ + +#include + +#include +#include + +#include + +#include + +GFXRECON_BEGIN_NAMESPACE(gfxrecon) + +GFXRECON_BEGIN_NAMESPACE(test_app) + +GFXRECON_BEGIN_NAMESPACE(multisample_depth) + +const int MAX_FRAMES_IN_FLIGHT = 2; + +class App : public gfxrecon::test::TestAppBase { + public: + App() = default; + private: + VkQueue graphics_queue; + VkQueue present_queue; + + std::vector depth_images; + std::vector depth_image_allocations; + std::vector depth_image_views; + + std::vector render_targets; + std::vector render_target_allocations; + std::vector render_target_views; + + std::vector framebuffers; + + VkRenderPass render_pass; + VkPipelineLayout pipeline_layout; + VkPipeline graphics_pipeline; + + VkCommandPool command_pool; + std::vector command_buffers; + + size_t current_frame = 0; + + gfxrecon::test::Sync sync; + VmaAllocator allocator; + + void create_allocator(); + void create_render_targets(); + void create_depth_buffers(); + void create_render_pass(); + void create_graphics_pipeline(); + void create_framebuffers(); + void create_command_buffers(); + void recreate_swapchain(); + void cleanup() override; + bool frame(const int frame_num) override; + void setup() override; + void configure_swapchain_builder(gfxrecon::test::SwapchainBuilder &swapchain_builder) override; +}; + +void App::configure_swapchain_builder(gfxrecon::test::SwapchainBuilder &swapchain_builder) { + swapchain_builder.add_image_usage_flags(VK_IMAGE_USAGE_TRANSFER_DST_BIT); +} + +void App::create_allocator() +{ + VmaVulkanFunctions vulkan_functions = {}; + vulkan_functions.vkGetInstanceProcAddr = init.inst_disp.fp_vkGetInstanceProcAddr; + vulkan_functions.vkGetDeviceProcAddr = init.disp.fp_vkGetDeviceProcAddr; + + VmaAllocatorCreateInfo allocator_create_info = {}; + allocator_create_info.physicalDevice = init.physical_device; + allocator_create_info.device = init.device; + allocator_create_info.instance = init.instance; + allocator_create_info.pVulkanFunctions = &vulkan_functions; + vmaCreateAllocator(&allocator_create_info, &this->allocator); +} + +void App::create_render_targets() +{ + render_targets.resize(init.swapchain_image_views.size()); + render_target_allocations.resize(init.swapchain_image_views.size()); + render_target_views.resize(init.swapchain_image_views.size()); + + for (size_t i = 0; i < init.swapchain_image_views.size(); i++) { + VkExtent3D extent = {}; + VkResult result; + + VkImageCreateInfo image_create_info = {}; + 
image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; + image_create_info.imageType = VK_IMAGE_TYPE_2D; + image_create_info.format = init.swapchain.image_format; + image_create_info.extent.height = init.swapchain.extent.height; + image_create_info.extent.width = init.swapchain.extent.width; + image_create_info.extent.depth = 1; + image_create_info.mipLevels = 1; + image_create_info.arrayLayers = 1; + image_create_info.samples = VK_SAMPLE_COUNT_2_BIT; + image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT; + + VmaAllocationCreateInfo allocation_info = {}; + allocation_info.usage = VMA_MEMORY_USAGE_AUTO; + + result = vmaCreateImage(allocator, &image_create_info, &allocation_info, &render_targets[i], &render_target_allocations[i], nullptr); + VERIFY_VK_RESULT("failed to create render target", result); + + VkImageViewCreateInfo image_view_create_info = {}; + image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; + image_view_create_info.image = this->render_targets[i]; + image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D; + image_view_create_info.format = image_create_info.format; + image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + image_view_create_info.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS; + image_view_create_info.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS; + result = init.disp.createImageView(&image_view_create_info, nullptr, &this->render_target_views[i]); + VERIFY_VK_RESULT("failed to create render target view", result); + } +} + +void App::create_depth_buffers() +{ + depth_images.resize(init.swapchain_image_views.size()); + depth_image_allocations.resize(init.swapchain_image_views.size()); + depth_image_views.resize(init.swapchain_image_views.size()); + + for (size_t i = 0; i < init.swapchain_image_views.size(); i++) { + VkExtent3D extent = {}; + VkResult result; + + VkImageCreateInfo image_create_info = {}; + image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; + image_create_info.imageType = VK_IMAGE_TYPE_2D; + image_create_info.format = VK_FORMAT_D16_UNORM; + image_create_info.extent.height = init.swapchain.extent.height; + image_create_info.extent.width = init.swapchain.extent.width; + image_create_info.extent.depth = 1; + image_create_info.mipLevels = 1; + image_create_info.arrayLayers = 1; + image_create_info.samples = VK_SAMPLE_COUNT_2_BIT; + image_create_info.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT; + + VmaAllocationCreateInfo allocation_info = {}; + allocation_info.usage = VMA_MEMORY_USAGE_AUTO; + + result = vmaCreateImage(allocator, &image_create_info, &allocation_info, &depth_images[i], &depth_image_allocations[i], nullptr); + VERIFY_VK_RESULT("failed to create depth buffer image", result); + + VkImageViewCreateInfo image_view_create_info = {}; + image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; + image_view_create_info.image = this->depth_images[i]; + image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D; + image_view_create_info.format = image_create_info.format; + image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT; + image_view_create_info.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS; + image_view_create_info.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS; + result = init.disp.createImageView(&image_view_create_info, nullptr, &this->depth_image_views[i]); + VERIFY_VK_RESULT("failed to create depth buffer image view", result); + } +} + +void 
App::create_render_pass() { + VkAttachmentDescription attachments[2]; + attachments[0] = {}; + attachments[0].format = init.swapchain.image_format; + attachments[0].samples = VK_SAMPLE_COUNT_2_BIT; + attachments[0].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; + attachments[0].storeOp = VK_ATTACHMENT_STORE_OP_STORE; + attachments[0].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; + attachments[0].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; + attachments[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; + attachments[0].finalLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL; + attachments[1] = {}; + attachments[1].format = VK_FORMAT_D16_UNORM; + attachments[1].samples = VK_SAMPLE_COUNT_2_BIT; + attachments[1].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; + attachments[1].storeOp = VK_ATTACHMENT_STORE_OP_STORE; + attachments[1].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; + attachments[1].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; + attachments[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; + attachments[1].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL; + + VkAttachmentReference color_attachment_ref = {}; + color_attachment_ref.attachment = 0; + color_attachment_ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; + + VkAttachmentReference depth_attachment_ref = {}; + depth_attachment_ref.attachment = 1; + depth_attachment_ref.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL; + + VkSubpassDescription subpass = {}; + subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS; + subpass.colorAttachmentCount = 1; + subpass.pColorAttachments = &color_attachment_ref; + subpass.pDepthStencilAttachment = &depth_attachment_ref; + + VkSubpassDependency dependency = {}; + dependency.srcSubpass = VK_SUBPASS_EXTERNAL; + dependency.dstSubpass = 0; + dependency.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT; + dependency.srcAccessMask = 0; + dependency.dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT; + dependency.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | + VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; + + VkRenderPassCreateInfo render_pass_info = {}; + render_pass_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO; + render_pass_info.attachmentCount = 2; + render_pass_info.pAttachments = attachments; + render_pass_info.subpassCount = 1; + render_pass_info.pSubpasses = &subpass; + render_pass_info.dependencyCount = 1; + render_pass_info.pDependencies = &dependency; + + auto result = init.disp.createRenderPass(&render_pass_info, nullptr, &this->render_pass); + VERIFY_VK_RESULT("failed to create render pass", result); +} + +void App::create_graphics_pipeline() { + auto vert_module = gfxrecon::test::readShaderFromFile(init.disp, "vert.spv"); + auto frag_module = gfxrecon::test::readShaderFromFile(init.disp, "frag.spv"); + + VkPipelineShaderStageCreateInfo vert_stage_info = {}; + vert_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; + vert_stage_info.stage = VK_SHADER_STAGE_VERTEX_BIT; + vert_stage_info.module = vert_module; + vert_stage_info.pName = "main"; + + VkPipelineShaderStageCreateInfo frag_stage_info = {}; + frag_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; + frag_stage_info.stage = VK_SHADER_STAGE_FRAGMENT_BIT; + frag_stage_info.module = frag_module; + frag_stage_info.pName = "main"; + + 
VkPipelineShaderStageCreateInfo shader_stages[] = { vert_stage_info, frag_stage_info }; + + VkPipelineVertexInputStateCreateInfo vertex_input_info = {}; + vertex_input_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO; + vertex_input_info.vertexBindingDescriptionCount = 0; + vertex_input_info.vertexAttributeDescriptionCount = 0; + + VkPipelineInputAssemblyStateCreateInfo input_assembly = {}; + input_assembly.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO; + input_assembly.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST; + input_assembly.primitiveRestartEnable = VK_FALSE; + + VkViewport viewport = {}; + viewport.x = 0.0f; + viewport.y = 0.0f; + viewport.width = (float)init.swapchain.extent.width; + viewport.height = (float)init.swapchain.extent.height; + viewport.minDepth = 0.0f; + viewport.maxDepth = 1.0f; + + VkRect2D scissor = {}; + scissor.offset = { 0, 0 }; + scissor.extent = init.swapchain.extent; + + VkPipelineViewportStateCreateInfo viewport_state = {}; + viewport_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO; + viewport_state.viewportCount = 1; + viewport_state.pViewports = &viewport; + viewport_state.scissorCount = 1; + viewport_state.pScissors = &scissor; + + VkPipelineRasterizationStateCreateInfo rasterizer = {}; + rasterizer.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO; + rasterizer.depthClampEnable = VK_FALSE; + rasterizer.rasterizerDiscardEnable = VK_FALSE; + rasterizer.polygonMode = VK_POLYGON_MODE_FILL; + rasterizer.lineWidth = 1.0f; + rasterizer.cullMode = VK_CULL_MODE_BACK_BIT; + rasterizer.frontFace = VK_FRONT_FACE_CLOCKWISE; + rasterizer.depthBiasEnable = VK_FALSE; + + VkPipelineMultisampleStateCreateInfo multisampling = {}; + multisampling.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO; + multisampling.sampleShadingEnable = VK_FALSE; + multisampling.rasterizationSamples = VK_SAMPLE_COUNT_2_BIT; + + VkPipelineColorBlendAttachmentState colorBlendAttachment = {}; + colorBlendAttachment.colorWriteMask = + VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT; + colorBlendAttachment.blendEnable = VK_FALSE; + + VkPipelineColorBlendStateCreateInfo color_blending = {}; + color_blending.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO; + color_blending.logicOpEnable = VK_FALSE; + color_blending.logicOp = VK_LOGIC_OP_COPY; + color_blending.attachmentCount = 1; + color_blending.pAttachments = &colorBlendAttachment; + color_blending.blendConstants[0] = 0.0f; + color_blending.blendConstants[1] = 0.0f; + color_blending.blendConstants[2] = 0.0f; + color_blending.blendConstants[3] = 0.0f; + + VkPipelineLayoutCreateInfo pipeline_layout_info = {}; + pipeline_layout_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; + pipeline_layout_info.setLayoutCount = 0; + pipeline_layout_info.pushConstantRangeCount = 0; + + auto result = init.disp.createPipelineLayout(&pipeline_layout_info, nullptr, &this->pipeline_layout); + VERIFY_VK_RESULT("failed to create pipeline layout", result); + + std::vector dynamic_states = { VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR }; + + VkPipelineDynamicStateCreateInfo dynamic_info = {}; + dynamic_info.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO; + dynamic_info.dynamicStateCount = static_cast(dynamic_states.size()); + dynamic_info.pDynamicStates = dynamic_states.data(); + + VkPipelineDepthStencilStateCreateInfo depth_stencil = {}; + depth_stencil.sType = 
VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO; + depth_stencil.depthTestEnable = true; + depth_stencil.depthWriteEnable = true; + depth_stencil.depthCompareOp = VK_COMPARE_OP_LESS; + + VkGraphicsPipelineCreateInfo pipeline_info = {}; + pipeline_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO; + pipeline_info.stageCount = 2; + pipeline_info.pStages = shader_stages; + pipeline_info.pVertexInputState = &vertex_input_info; + pipeline_info.pInputAssemblyState = &input_assembly; + pipeline_info.pViewportState = &viewport_state; + pipeline_info.pRasterizationState = &rasterizer; + pipeline_info.pMultisampleState = &multisampling; + pipeline_info.pColorBlendState = &color_blending; + pipeline_info.pDynamicState = &dynamic_info; + pipeline_info.layout = this->pipeline_layout; + pipeline_info.renderPass = this->render_pass; + pipeline_info.pDepthStencilState = &depth_stencil; + + result = init.disp.createGraphicsPipelines(VK_NULL_HANDLE, 1, &pipeline_info, nullptr, &this->graphics_pipeline); + VERIFY_VK_RESULT("failed to create graphics pipeline", result); + + init.disp.destroyShaderModule(frag_module, nullptr); + init.disp.destroyShaderModule(vert_module, nullptr); +} + +void App::create_framebuffers() { + this->framebuffers.resize(init.swapchain_image_views.size()); + + for (size_t i = 0; i < init.swapchain_image_views.size(); i++) { + VkImageView attachments[] = { render_target_views[i], depth_image_views[i] }; + + VkFramebufferCreateInfo framebuffer_info = {}; + framebuffer_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO; + framebuffer_info.renderPass = this->render_pass; + framebuffer_info.attachmentCount = 2; + framebuffer_info.pAttachments = attachments; + framebuffer_info.width = init.swapchain.extent.width; + framebuffer_info.height = init.swapchain.extent.height; + framebuffer_info.layers = 1; + + auto result = init.disp.createFramebuffer(&framebuffer_info, nullptr, &this->framebuffers[i]); + VERIFY_VK_RESULT("failed to create framebuffer", result); + } +} + +void App::create_command_buffers() { + this->command_buffers.resize(MAX_FRAMES_IN_FLIGHT); + + VkCommandBufferAllocateInfo allocInfo = {}; + allocInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO; + allocInfo.commandPool = this->command_pool; + allocInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY; + allocInfo.commandBufferCount = (uint32_t)this->command_buffers.size(); + + auto result = init.disp.allocateCommandBuffers(&allocInfo, this->command_buffers.data()); + VERIFY_VK_RESULT("failed to allocate command buffers", result); + + for (size_t i = 0; i < this->command_buffers.size(); i++) { + VkCommandBufferBeginInfo begin_info = {}; + begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; + + result = init.disp.beginCommandBuffer(this->command_buffers[i], &begin_info); + VERIFY_VK_RESULT("failed to create command buffer", result); + + VkViewport viewport = {}; + viewport.x = 0.0f; + viewport.y = 0.0f; + viewport.width = (float)init.swapchain.extent.width; + viewport.height = (float)init.swapchain.extent.height; + viewport.minDepth = 0.0f; + viewport.maxDepth = 1.0f; + + VkRect2D scissor = {}; + scissor.offset = { 0, 0 }; + scissor.extent = init.swapchain.extent; + + init.disp.cmdSetViewport(this->command_buffers[i], 0, 1, &viewport); + init.disp.cmdSetScissor(this->command_buffers[i], 0, 1, &scissor); + + VkRenderPassBeginInfo render_pass_info = {}; + render_pass_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO; + render_pass_info.renderPass = this->render_pass; + 
render_pass_info.framebuffer = this->framebuffers[i]; + render_pass_info.renderArea.offset = { 0, 0 }; + render_pass_info.renderArea.extent = init.swapchain.extent; + VkClearValue clearColor[2]; + clearColor[0] = { 0.0f, 0.0f, 0.0f, 1.0f }; + clearColor[1] = { 1.0f, 1 }; + render_pass_info.clearValueCount = 2; + render_pass_info.pClearValues = clearColor; + + init.disp.cmdBeginRenderPass(this->command_buffers[i], &render_pass_info, VK_SUBPASS_CONTENTS_INLINE); + + init.disp.cmdBindPipeline(this->command_buffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, this->graphics_pipeline); + + init.disp.cmdDraw(this->command_buffers[i], 3, 1, 0, 0); + + init.disp.cmdEndRenderPass(this->command_buffers[i]); + + VkImageMemoryBarrier image_barrier = {}; + image_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; + image_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + image_barrier.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS; + image_barrier.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS; + image_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT; + image_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL; + image_barrier.image = init.swapchain_images[i]; + init.disp.cmdPipelineBarrier( + command_buffers[i], + VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, + VK_PIPELINE_STAGE_TRANSFER_BIT, + 0, + 0, + nullptr, + 0, + nullptr, + 1, + &image_barrier + ); + + VkImageResolve region = {}; + region.extent.width = init.swapchain.extent.width; + region.extent.height = init.swapchain.extent.height; + region.extent.depth = 1; + region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + region.srcSubresource.layerCount = 1; + region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + region.dstSubresource.layerCount = 1; + init.disp.cmdResolveImage(command_buffers[i], render_targets[i], VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, init.swapchain_images[i], VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion); + + image_barrier = {}; + image_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; + image_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + image_barrier.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS; + image_barrier.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS; + image_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT; + image_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL; + image_barrier.newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR; + image_barrier.image = init.swapchain_images[i]; + init.disp.cmdPipelineBarrier( + command_buffers[i], + VK_PIPELINE_STAGE_TRANSFER_BIT, + VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, + 0, + 0, + nullptr, + 0, + nullptr, + 1, + &image_barrier + ); + + result = init.disp.endCommandBuffer(this->command_buffers[i]); + VERIFY_VK_RESULT("failed to end command buffer", result); + } +} + +void App::recreate_swapchain() { + init.disp.deviceWaitIdle(); + + init.disp.destroyCommandPool(this->command_pool, nullptr); + + for (size_t i = 0; i < init.swapchain_image_views.size(); i++) + { + init.disp.destroyFramebuffer(framebuffers[i], nullptr); + init.disp.destroyImageView(depth_image_views[i], nullptr); + vmaDestroyImage(allocator, depth_images[i], depth_image_allocations[i]); + init.disp.destroyImageView(render_target_views[i], nullptr); + vmaDestroyImage(allocator, render_targets[i], render_target_allocations[i]); + } + + TestAppBase::recreate_swapchain(false); + + create_render_targets(); + create_depth_buffers(); + create_framebuffers(); + + auto queue_family_index = 
init.device.get_queue_index(gfxrecon::test::QueueType::graphics); + if (!queue_family_index) throw std::runtime_error("could not find graphics queue"); + this->command_pool = gfxrecon::test::create_command_pool(init.disp, *queue_family_index); + + create_command_buffers(); +} + +const int NUM_FRAMES = 10; +#define IS_DONE(frame_num) frame_num >= NUM_FRAMES; + +bool App::frame(const int frame_num) +{ + init.disp.waitForFences(1, &this->sync.in_flight_fences[this->current_frame], VK_TRUE, UINT64_MAX); + + uint32_t image_index = 0; + VkResult result = init.disp.acquireNextImageKHR( + init.swapchain, UINT64_MAX, this->sync.available_semaphores[this->current_frame], VK_NULL_HANDLE, &image_index); + + if (result == VK_ERROR_OUT_OF_DATE_KHR) { + recreate_swapchain(); + return IS_DONE(frame_num); + } else if (result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR) { + throw gfxrecon::test::vulkan_exception("failed to acquire next image", result); + } + + if (this->sync.image_in_flight[image_index] != VK_NULL_HANDLE) { + init.disp.waitForFences(1, &this->sync.image_in_flight[image_index], VK_TRUE, UINT64_MAX); + } + this->sync.image_in_flight[image_index] = this->sync.in_flight_fences[this->current_frame]; + + VkSubmitInfo submitInfo = {}; + submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; + + VkSemaphore wait_semaphores[] = { this->sync.available_semaphores[this->current_frame] }; + VkPipelineStageFlags wait_stages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT }; + submitInfo.waitSemaphoreCount = 1; + submitInfo.pWaitSemaphores = wait_semaphores; + submitInfo.pWaitDstStageMask = wait_stages; + + submitInfo.commandBufferCount = 1; + submitInfo.pCommandBuffers = &this->command_buffers[image_index]; + + VkSemaphore signal_semaphores[] = { this->sync.finished_semaphore[this->current_frame] }; + submitInfo.signalSemaphoreCount = 1; + submitInfo.pSignalSemaphores = signal_semaphores; + + init.disp.resetFences(1, &this->sync.in_flight_fences[this->current_frame]); + + result = init.disp.queueSubmit(this->graphics_queue, 1, &submitInfo, this->sync.in_flight_fences[this->current_frame]); + VERIFY_VK_RESULT("failed to submit queue", result); + + VkPresentInfoKHR present_info = {}; + present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; + + present_info.waitSemaphoreCount = 1; + present_info.pWaitSemaphores = signal_semaphores; + + VkSwapchainKHR swapChains[] = { init.swapchain }; + present_info.swapchainCount = 1; + present_info.pSwapchains = swapChains; + + present_info.pImageIndices = &image_index; + + result = init.disp.queuePresentKHR(this->present_queue, &present_info); + if (result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR) { + recreate_swapchain(); + return frame_num >= NUM_FRAMES; + } + VERIFY_VK_RESULT("failed to present queue", result); + + this->current_frame = (this->current_frame + 1) % MAX_FRAMES_IN_FLIGHT; + + return IS_DONE(frame_num); +} + +void App::cleanup() +{ + for (size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) + { + init.disp.destroySemaphore(this->sync.finished_semaphore[i], nullptr); + init.disp.destroySemaphore(this->sync.available_semaphores[i], nullptr); + init.disp.destroyFence(this->sync.in_flight_fences[i], nullptr); + } + + init.disp.destroyCommandPool(this->command_pool, nullptr); + + for (size_t i = 0; i < init.swapchain_image_views.size(); i++) + { + init.disp.destroyFramebuffer(framebuffers[i], nullptr); + init.disp.destroyImageView(depth_image_views[i], nullptr); + vmaDestroyImage(allocator, depth_images[i], depth_image_allocations[i]); + 
init.disp.destroyImageView(render_target_views[i], nullptr); + vmaDestroyImage(allocator, render_targets[i], render_target_allocations[i]); + } + + init.disp.destroyPipeline(this->graphics_pipeline, nullptr); + init.disp.destroyPipelineLayout(this->pipeline_layout, nullptr); + init.disp.destroyRenderPass(this->render_pass, nullptr); + + vmaDestroyAllocator(this->allocator); +} + +void App::setup() +{ + create_allocator(); + + auto graphics_queue = init.device.get_queue(gfxrecon::test::QueueType::graphics); + if (!graphics_queue.has_value()) + throw std::runtime_error("could not get graphics queue"); + this->graphics_queue = *graphics_queue; + + auto present_queue = init.device.get_queue(gfxrecon::test::QueueType::present); + if (!present_queue.has_value()) + throw std::runtime_error("could not get present queue"); + this->present_queue = *present_queue; + + create_render_targets(); + create_depth_buffers(); + + create_render_pass(); + create_graphics_pipeline(); + + create_framebuffers(); + + auto queue_family_index = init.device.get_queue_index(gfxrecon::test::QueueType::graphics); + if (!queue_family_index) + throw std::runtime_error("could not find graphics queue"); + this->command_pool = gfxrecon::test::create_command_pool(init.disp, *queue_family_index); + + create_command_buffers(); + + this->sync = gfxrecon::test::create_sync_objects(init.swapchain, init.disp, MAX_FRAMES_IN_FLIGHT); +} + +GFXRECON_END_NAMESPACE(multisample_depth) + +GFXRECON_END_NAMESPACE(test_app) + +GFXRECON_END_NAMESPACE(gfxrecon) + +int main(int argc, char *argv[]) { + try { + gfxrecon::test_app::multisample_depth::App app{}; + app.run("multisample depth"); + return 0; + } catch (std::exception e) { + std::cout << e.what() << std::endl; + return -1; + } +} diff --git a/test/test_apps/multisample-depth/shaders/frag.spv b/test/test_apps/multisample-depth/shaders/frag.spv new file mode 100644 index 0000000000000000000000000000000000000000..1331eb4cda17af9c3384bd5c049b00369d68a009 GIT binary patch literal 608 zcmYk2-Acni5QWF4X=7{uEa;t7ycCKTDuPr|k=z98&jTzmSv3-qkfefleKw!U8^Lqd zM6xiMowMKWoQc&s=!$HJBLkVr-i9y>RhEyZ#pOoGCeZxa_M@v zD#$Wbl%KIyag=BkMmebHLz8nFT$BXyDr^Eah^9ANY~wFol{aae=CB0CHh!OT|D_gP z+~OR21h80hrn;VD8qTwCCKI#Y!M+1+hS&wqxmmOS3||MCe~$WjRkH^*@;zkotjYJ0 z@w<#po;5vCJ48)(h!9y{o?-_2VJ|Me@eRdBUO?sB0|f8iL*?C3r6K)m<4R+XnwwM_ b-@k)<`rk(IK6#$g#|64_9-jTcrd2B5XJ94y1WEL1biSCP<((ZDi9N+2EkO5O+ZNa+SJ{OHMP5Gi%~!MpPBeq z`NhQZYkN0)H}b+^)X{=X537hJyWhZQx{{*lxY>->zsDlS>9{!>~5o& zF^xiqW>%G~rtE7FCB$p84cQY}L%+KGABmf0!gw8j|KyFIg@Ygs^3cyd2fZ-$`yaoA z-8@4xiN-rR@;Yz*?qJ|&`HwjC<0uLJUX)4|eGAczkLaSL8(;KB1&$P>JvqiPm(<6g zpGA4pPqdYordg8j_Tzpk!qL30@OkVXhv`|E=HYiiu_aGTuKI-7u|m%|6nvEAr_oP! zM@^YycI6J@XmA*1d60C&VUANIcM-?KoTKyN{8;qEIZ9JD^FHa(5wmc%pVHv`fZi3q z6X(jarb;*D=noEWb@T;C50)c_%DTzPb!82TXhu)^%WB4f=F?fe1mvVQA^&H zEva6Qtia?Jgn2ctjI~+e3B}Ye3s1^Z1Lq2}H{XMQQDN>Aj(YsGjAijm34d?tZ1~o! z3=O>1Tvn|0+G^$m;4sTwQA|zAwTd3*h4~(O^2~Tuo<8gh3yR_F42xABo*BTrztI`4 z2^;fE{j`eO>q2n!jamLig>BCAH%GMO;Ms3k#ecfg(2WZc1RdGc^8 zid=bHLk{C^R^^%5l`$81O$NtZugkl#nv8wGcVsN;@5+;dryjgfapdnSrVs8A+?MCt z^a9?lFlS9YnD>c>{4;s#$?xec@CKQgh32mqNB)%%Z;HDm=59E*mr~Q)JsExNOHEG? 
zWa#11&>#N~_z%*s=Vryp4Y{p~W9DO3F~=hrT5{YocgH@=`dG$!a%S`eMsNFis@V3$ YtUEG#heyNQyE1s{IUDdlo%(axfA`;bD*ylh literal 0 HcmV?d00001 diff --git a/test/test_apps/triangle/CMakeLists.txt b/test/test_apps/triangle/CMakeLists.txt index f3104887bb..e157543a2c 100644 --- a/test/test_apps/triangle/CMakeLists.txt +++ b/test/test_apps/triangle/CMakeLists.txt @@ -30,9 +30,8 @@ add_executable(gfxrecon-testapp-triangle "") target_sources(gfxrecon-testapp-triangle PRIVATE - ${CMAKE_CURRENT_LIST_DIR}/triangle.cpp - ${CMAKE_CURRENT_LIST_DIR}/../common/test_app_base.cpp - ) + ${CMAKE_CURRENT_LIST_DIR}/app.cpp + ${CMAKE_CURRENT_LIST_DIR}/../common/test_app_base.cpp) target_include_directories(gfxrecon-testapp-triangle PUBLIC ${CMAKE_BINARY_DIR} diff --git a/test/test_apps/triangle/triangle.cpp b/test/test_apps/triangle/app.cpp similarity index 96% rename from test/test_apps/triangle/triangle.cpp rename to test/test_apps/triangle/app.cpp index 1628bac554..570a602a48 100644 --- a/test/test_apps/triangle/triangle.cpp +++ b/test/test_apps/triangle/app.cpp @@ -37,9 +37,9 @@ GFXRECON_BEGIN_NAMESPACE(triangle) const int MAX_FRAMES_IN_FLIGHT = 2; -class Triangle : public gfxrecon::test::TestAppBase { +class App : public gfxrecon::test::TestAppBase { public: - Triangle() : gfxrecon::test::TestAppBase() {} + App() = default; private: VkQueue graphics_queue; VkQueue present_queue; @@ -62,13 +62,12 @@ class Triangle : public gfxrecon::test::TestAppBase { void create_framebuffers(); void create_command_buffers(); void recreate_swapchain(); - void draw_frame(); void cleanup() override; bool frame(const int frame_num) override; void setup() override; }; -void Triangle::create_render_pass() { +void App::create_render_pass() { VkAttachmentDescription color_attachment = {}; color_attachment.format = init.swapchain.image_format; color_attachment.samples = VK_SAMPLE_COUNT_1_BIT; @@ -109,7 +108,7 @@ void Triangle::create_render_pass() { VERIFY_VK_RESULT("failed to create render pass", result); } -void Triangle::create_graphics_pipeline() { +void App::create_graphics_pipeline() { auto vert_module = gfxrecon::test::readShaderFromFile(init.disp, "vert.spv"); auto frag_module = gfxrecon::test::readShaderFromFile(init.disp, "frag.spv"); @@ -225,7 +224,7 @@ void Triangle::create_graphics_pipeline() { init.disp.destroyShaderModule(vert_module, nullptr); } -void Triangle::create_framebuffers() { +void App::create_framebuffers() { this->framebuffers.resize(init.swapchain_image_views.size()); for (size_t i = 0; i < init.swapchain_image_views.size(); i++) { @@ -245,7 +244,7 @@ void Triangle::create_framebuffers() { } } -void Triangle::create_command_buffers() { +void App::create_command_buffers() { this->command_buffers.resize(this->framebuffers.size()); VkCommandBufferAllocateInfo allocInfo = {}; @@ -302,7 +301,7 @@ void Triangle::create_command_buffers() { } } -void Triangle::recreate_swapchain() { +void App::recreate_swapchain() { init.disp.deviceWaitIdle(); init.disp.destroyCommandPool(this->command_pool, nullptr); @@ -311,7 +310,7 @@ void Triangle::recreate_swapchain() { init.disp.destroyFramebuffer(framebuffer, nullptr); } - gfxrecon::test::recreate_swapchain(init, false); + TestAppBase::recreate_swapchain(false); create_framebuffers(); @@ -325,7 +324,7 @@ void Triangle::recreate_swapchain() { const int NUM_FRAMES = 10; #define IS_DONE(frame_num) frame_num >= NUM_FRAMES; -bool Triangle::frame(const int frame_num) +bool App::frame(const int frame_num) { init.disp.waitForFences(1, &this->sync.in_flight_fences[this->current_frame], VK_TRUE, 
UINT64_MAX); @@ -390,7 +389,7 @@ bool Triangle::frame(const int frame_num) return IS_DONE(frame_num); } -void Triangle::cleanup() { +void App::cleanup() { for (size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { init.disp.destroySemaphore(this->sync.finished_semaphore[i], nullptr); init.disp.destroySemaphore(this->sync.available_semaphores[i], nullptr); @@ -408,7 +407,7 @@ void Triangle::cleanup() { init.disp.destroyRenderPass(this->render_pass, nullptr); } -void Triangle::setup() +void App::setup() { auto graphics_queue = init.device.get_queue(gfxrecon::test::QueueType::graphics); if (!graphics_queue.has_value()) @@ -443,8 +442,8 @@ GFXRECON_END_NAMESPACE(gfxrecon) int main(int argc, char *argv[]) { try { - gfxrecon::test_app::triangle::Triangle triangle{}; - triangle.run("triangle"); + gfxrecon::test_app::triangle::App app{}; + app.run("triangle"); return 0; } catch (std::exception e) { std::cout << e.what() << std::endl; From 154d4fc184214f243b9839cd7eea27ab1229de93 Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Sun, 27 Oct 2024 12:28:19 -0400 Subject: [PATCH 21/70] Fix multiple frames --- test/test_apps/multisample-depth/app.cpp | 8 ++++---- test/test_apps/triangle/app.cpp | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/test/test_apps/multisample-depth/app.cpp b/test/test_apps/multisample-depth/app.cpp index 56cf47f5a2..63cf42c57e 100644 --- a/test/test_apps/multisample-depth/app.cpp +++ b/test/test_apps/multisample-depth/app.cpp @@ -521,7 +521,7 @@ void App::recreate_swapchain() { } const int NUM_FRAMES = 10; -#define IS_DONE(frame_num) frame_num >= NUM_FRAMES; +#define IS_RUNNING(frame_num) frame_num < NUM_FRAMES; bool App::frame(const int frame_num) { @@ -533,7 +533,7 @@ bool App::frame(const int frame_num) if (result == VK_ERROR_OUT_OF_DATE_KHR) { recreate_swapchain(); - return IS_DONE(frame_num); + return IS_RUNNING(frame_num); } else if (result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR) { throw gfxrecon::test::vulkan_exception("failed to acquire next image", result); } @@ -553,7 +553,7 @@ bool App::frame(const int frame_num) submitInfo.pWaitDstStageMask = wait_stages; submitInfo.commandBufferCount = 1; - submitInfo.pCommandBuffers = &this->command_buffers[image_index]; + submitInfo.pCommandBuffers = &this->command_buffers[this->current_frame]; VkSemaphore signal_semaphores[] = { this->sync.finished_semaphore[this->current_frame] }; submitInfo.signalSemaphoreCount = 1; @@ -585,7 +585,7 @@ bool App::frame(const int frame_num) this->current_frame = (this->current_frame + 1) % MAX_FRAMES_IN_FLIGHT; - return IS_DONE(frame_num); + return IS_RUNNING(frame_num); } void App::cleanup() diff --git a/test/test_apps/triangle/app.cpp b/test/test_apps/triangle/app.cpp index 570a602a48..1910df1497 100644 --- a/test/test_apps/triangle/app.cpp +++ b/test/test_apps/triangle/app.cpp @@ -322,7 +322,7 @@ void App::recreate_swapchain() { } const int NUM_FRAMES = 10; -#define IS_DONE(frame_num) frame_num >= NUM_FRAMES; +#define IS_RUNNING(frame_num) frame_num < NUM_FRAMES; bool App::frame(const int frame_num) { @@ -334,7 +334,7 @@ bool App::frame(const int frame_num) if (result == VK_ERROR_OUT_OF_DATE_KHR) { recreate_swapchain(); - return IS_DONE(frame_num); + return IS_RUNNING(frame_num); } else if (result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR) { throw gfxrecon::test::vulkan_exception("failed to acquire next image", result); } @@ -354,7 +354,7 @@ bool App::frame(const int frame_num) submitInfo.pWaitDstStageMask = wait_stages; submitInfo.commandBufferCount = 1; - 
submitInfo.pCommandBuffers = &this->command_buffers[image_index]; + submitInfo.pCommandBuffers = &this->command_buffers[this->current_frame]; VkSemaphore signal_semaphores[] = { this->sync.finished_semaphore[this->current_frame] }; submitInfo.signalSemaphoreCount = 1; @@ -386,7 +386,7 @@ bool App::frame(const int frame_num) this->current_frame = (this->current_frame + 1) % MAX_FRAMES_IN_FLIGHT; - return IS_DONE(frame_num); + return IS_RUNNING(frame_num); } void App::cleanup() { From a8c0b7ad511f41e50a18094ca0bbe57e1d03493e Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Sun, 27 Oct 2024 14:41:29 -0400 Subject: [PATCH 22/70] Triangle test with clean validation --- test/test_apps/triangle/app.cpp | 456 ++++++++++++++++++-------------- 1 file changed, 251 insertions(+), 205 deletions(-) diff --git a/test/test_apps/triangle/app.cpp b/test/test_apps/triangle/app.cpp index 1910df1497..b090e6ad69 100644 --- a/test/test_apps/triangle/app.cpp +++ b/test/test_apps/triangle/app.cpp @@ -37,21 +37,22 @@ GFXRECON_BEGIN_NAMESPACE(triangle) const int MAX_FRAMES_IN_FLIGHT = 2; -class App : public gfxrecon::test::TestAppBase { +class App : public gfxrecon::test::TestAppBase +{ public: App() = default; + private: VkQueue graphics_queue; VkQueue present_queue; std::vector framebuffers; - VkRenderPass render_pass; + VkRenderPass render_pass; VkPipelineLayout pipeline_layout; - VkPipeline graphics_pipeline; + VkPipeline graphics_pipeline; - VkCommandPool command_pool; - std::vector command_buffers; + VkCommandPool command_pools[MAX_FRAMES_IN_FLIGHT]; size_t current_frame = 0; @@ -60,115 +61,107 @@ class App : public gfxrecon::test::TestAppBase { void create_render_pass(); void create_graphics_pipeline(); void create_framebuffers(); - void create_command_buffers(); void recreate_swapchain(); void cleanup() override; bool frame(const int frame_num) override; void setup() override; }; -void App::create_render_pass() { +void App::create_render_pass() +{ VkAttachmentDescription color_attachment = {}; - color_attachment.format = init.swapchain.image_format; - color_attachment.samples = VK_SAMPLE_COUNT_1_BIT; - color_attachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; - color_attachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE; - color_attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; - color_attachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; - color_attachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; - color_attachment.finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR; + color_attachment.format = init.swapchain.image_format; + color_attachment.samples = VK_SAMPLE_COUNT_1_BIT; + color_attachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; + color_attachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE; + color_attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; + color_attachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; + color_attachment.initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; + color_attachment.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; VkAttachmentReference color_attachment_ref = {}; - color_attachment_ref.attachment = 0; - color_attachment_ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; + color_attachment_ref.attachment = 0; + color_attachment_ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; VkSubpassDescription subpass = {}; - subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS; + subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS; subpass.colorAttachmentCount = 1; - subpass.pColorAttachments = &color_attachment_ref; - - 
VkSubpassDependency dependency = {}; - dependency.srcSubpass = VK_SUBPASS_EXTERNAL; - dependency.dstSubpass = 0; - dependency.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; - dependency.srcAccessMask = 0; - dependency.dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; - dependency.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; + subpass.pColorAttachments = &color_attachment_ref; VkRenderPassCreateInfo render_pass_info = {}; - render_pass_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO; - render_pass_info.attachmentCount = 1; - render_pass_info.pAttachments = &color_attachment; - render_pass_info.subpassCount = 1; - render_pass_info.pSubpasses = &subpass; - render_pass_info.dependencyCount = 1; - render_pass_info.pDependencies = &dependency; + render_pass_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO; + render_pass_info.attachmentCount = 1; + render_pass_info.pAttachments = &color_attachment; + render_pass_info.subpassCount = 1; + render_pass_info.pSubpasses = &subpass; + render_pass_info.dependencyCount = 0; auto result = init.disp.createRenderPass(&render_pass_info, nullptr, &this->render_pass); VERIFY_VK_RESULT("failed to create render pass", result); } -void App::create_graphics_pipeline() { +void App::create_graphics_pipeline() +{ auto vert_module = gfxrecon::test::readShaderFromFile(init.disp, "vert.spv"); auto frag_module = gfxrecon::test::readShaderFromFile(init.disp, "frag.spv"); VkPipelineShaderStageCreateInfo vert_stage_info = {}; - vert_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; - vert_stage_info.stage = VK_SHADER_STAGE_VERTEX_BIT; - vert_stage_info.module = vert_module; - vert_stage_info.pName = "main"; + vert_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; + vert_stage_info.stage = VK_SHADER_STAGE_VERTEX_BIT; + vert_stage_info.module = vert_module; + vert_stage_info.pName = "main"; VkPipelineShaderStageCreateInfo frag_stage_info = {}; - frag_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; - frag_stage_info.stage = VK_SHADER_STAGE_FRAGMENT_BIT; - frag_stage_info.module = frag_module; - frag_stage_info.pName = "main"; + frag_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; + frag_stage_info.stage = VK_SHADER_STAGE_FRAGMENT_BIT; + frag_stage_info.module = frag_module; + frag_stage_info.pName = "main"; VkPipelineShaderStageCreateInfo shader_stages[] = { vert_stage_info, frag_stage_info }; VkPipelineVertexInputStateCreateInfo vertex_input_info = {}; - vertex_input_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO; - vertex_input_info.vertexBindingDescriptionCount = 0; - vertex_input_info.vertexAttributeDescriptionCount = 0; + vertex_input_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO; + vertex_input_info.vertexBindingDescriptionCount = 0; + vertex_input_info.vertexAttributeDescriptionCount = 0; VkPipelineInputAssemblyStateCreateInfo input_assembly = {}; - input_assembly.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO; - input_assembly.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST; - input_assembly.primitiveRestartEnable = VK_FALSE; + input_assembly.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO; + input_assembly.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST; + input_assembly.primitiveRestartEnable = VK_FALSE; VkViewport viewport = {}; - viewport.x = 0.0f; - viewport.y = 0.0f; - viewport.width = 
(float)init.swapchain.extent.width; - viewport.height = (float)init.swapchain.extent.height; - viewport.minDepth = 0.0f; - viewport.maxDepth = 1.0f; + viewport.x = 0.0f; + viewport.y = 0.0f; + viewport.width = (float)init.swapchain.extent.width; + viewport.height = (float)init.swapchain.extent.height; + viewport.minDepth = 0.0f; + viewport.maxDepth = 1.0f; VkRect2D scissor = {}; - scissor.offset = { 0, 0 }; - scissor.extent = init.swapchain.extent; + scissor.offset = { 0, 0 }; + scissor.extent = init.swapchain.extent; VkPipelineViewportStateCreateInfo viewport_state = {}; - viewport_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO; - viewport_state.viewportCount = 1; - viewport_state.pViewports = &viewport; - viewport_state.scissorCount = 1; - viewport_state.pScissors = &scissor; + viewport_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO; + viewport_state.viewportCount = 1; + viewport_state.pViewports = &viewport; + viewport_state.scissorCount = 1; + viewport_state.pScissors = &scissor; VkPipelineRasterizationStateCreateInfo rasterizer = {}; - rasterizer.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO; - rasterizer.depthClampEnable = VK_FALSE; - rasterizer.rasterizerDiscardEnable = VK_FALSE; - rasterizer.polygonMode = VK_POLYGON_MODE_FILL; - rasterizer.lineWidth = 1.0f; - rasterizer.cullMode = VK_CULL_MODE_BACK_BIT; - rasterizer.frontFace = VK_FRONT_FACE_CLOCKWISE; - rasterizer.depthBiasEnable = VK_FALSE; + rasterizer.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO; + rasterizer.depthClampEnable = VK_FALSE; + rasterizer.rasterizerDiscardEnable = VK_FALSE; + rasterizer.polygonMode = VK_POLYGON_MODE_FILL; + rasterizer.lineWidth = 1.0f; + rasterizer.cullMode = VK_CULL_MODE_BACK_BIT; + rasterizer.frontFace = VK_FRONT_FACE_CLOCKWISE; + rasterizer.depthBiasEnable = VK_FALSE; VkPipelineMultisampleStateCreateInfo multisampling = {}; - multisampling.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO; - multisampling.sampleShadingEnable = VK_FALSE; - multisampling.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT; + multisampling.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO; + multisampling.sampleShadingEnable = VK_FALSE; + multisampling.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT; VkPipelineColorBlendAttachmentState colorBlendAttachment = {}; colorBlendAttachment.colorWriteMask = @@ -176,20 +169,20 @@ void App::create_graphics_pipeline() { colorBlendAttachment.blendEnable = VK_FALSE; VkPipelineColorBlendStateCreateInfo color_blending = {}; - color_blending.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO; - color_blending.logicOpEnable = VK_FALSE; - color_blending.logicOp = VK_LOGIC_OP_COPY; - color_blending.attachmentCount = 1; - color_blending.pAttachments = &colorBlendAttachment; - color_blending.blendConstants[0] = 0.0f; - color_blending.blendConstants[1] = 0.0f; - color_blending.blendConstants[2] = 0.0f; - color_blending.blendConstants[3] = 0.0f; + color_blending.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO; + color_blending.logicOpEnable = VK_FALSE; + color_blending.logicOp = VK_LOGIC_OP_COPY; + color_blending.attachmentCount = 1; + color_blending.pAttachments = &colorBlendAttachment; + color_blending.blendConstants[0] = 0.0f; + color_blending.blendConstants[1] = 0.0f; + color_blending.blendConstants[2] = 0.0f; + color_blending.blendConstants[3] = 0.0f; VkPipelineLayoutCreateInfo pipeline_layout_info = {}; - pipeline_layout_info.sType = 
VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; - pipeline_layout_info.setLayoutCount = 0; - pipeline_layout_info.pushConstantRangeCount = 0; + pipeline_layout_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; + pipeline_layout_info.setLayoutCount = 0; + pipeline_layout_info.pushConstantRangeCount = 0; auto result = init.disp.createPipelineLayout(&pipeline_layout_info, nullptr, &this->pipeline_layout); VERIFY_VK_RESULT("failed to create pipeline layout", result); @@ -197,25 +190,25 @@ void App::create_graphics_pipeline() { std::vector dynamic_states = { VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR }; VkPipelineDynamicStateCreateInfo dynamic_info = {}; - dynamic_info.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO; - dynamic_info.dynamicStateCount = static_cast(dynamic_states.size()); - dynamic_info.pDynamicStates = dynamic_states.data(); + dynamic_info.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO; + dynamic_info.dynamicStateCount = static_cast(dynamic_states.size()); + dynamic_info.pDynamicStates = dynamic_states.data(); VkGraphicsPipelineCreateInfo pipeline_info = {}; - pipeline_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO; - pipeline_info.stageCount = 2; - pipeline_info.pStages = shader_stages; - pipeline_info.pVertexInputState = &vertex_input_info; - pipeline_info.pInputAssemblyState = &input_assembly; - pipeline_info.pViewportState = &viewport_state; - pipeline_info.pRasterizationState = &rasterizer; - pipeline_info.pMultisampleState = &multisampling; - pipeline_info.pColorBlendState = &color_blending; - pipeline_info.pDynamicState = &dynamic_info; - pipeline_info.layout = this->pipeline_layout; - pipeline_info.renderPass = this->render_pass; - pipeline_info.subpass = 0; - pipeline_info.basePipelineHandle = VK_NULL_HANDLE; + pipeline_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO; + pipeline_info.stageCount = 2; + pipeline_info.pStages = shader_stages; + pipeline_info.pVertexInputState = &vertex_input_info; + pipeline_info.pInputAssemblyState = &input_assembly; + pipeline_info.pViewportState = &viewport_state; + pipeline_info.pRasterizationState = &rasterizer; + pipeline_info.pMultisampleState = &multisampling; + pipeline_info.pColorBlendState = &color_blending; + pipeline_info.pDynamicState = &dynamic_info; + pipeline_info.layout = this->pipeline_layout; + pipeline_info.renderPass = this->render_pass; + pipeline_info.subpass = 0; + pipeline_info.basePipelineHandle = VK_NULL_HANDLE; result = init.disp.createGraphicsPipelines(VK_NULL_HANDLE, 1, &pipeline_info, nullptr, &this->graphics_pipeline); VERIFY_VK_RESULT("failed to create graphics pipeline", result); @@ -224,101 +217,40 @@ void App::create_graphics_pipeline() { init.disp.destroyShaderModule(vert_module, nullptr); } -void App::create_framebuffers() { +void App::create_framebuffers() +{ this->framebuffers.resize(init.swapchain_image_views.size()); - for (size_t i = 0; i < init.swapchain_image_views.size(); i++) { + for (size_t i = 0; i < init.swapchain_image_views.size(); i++) + { VkImageView attachments[] = { init.swapchain_image_views[i] }; VkFramebufferCreateInfo framebuffer_info = {}; - framebuffer_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO; - framebuffer_info.renderPass = this->render_pass; - framebuffer_info.attachmentCount = 1; - framebuffer_info.pAttachments = attachments; - framebuffer_info.width = init.swapchain.extent.width; - framebuffer_info.height = init.swapchain.extent.height; - framebuffer_info.layers = 1; + 
framebuffer_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO; + framebuffer_info.renderPass = this->render_pass; + framebuffer_info.attachmentCount = 1; + framebuffer_info.pAttachments = attachments; + framebuffer_info.width = init.swapchain.extent.width; + framebuffer_info.height = init.swapchain.extent.height; + framebuffer_info.layers = 1; auto result = init.disp.createFramebuffer(&framebuffer_info, nullptr, &this->framebuffers[i]); VERIFY_VK_RESULT("failed to create framebuffer", result); } } -void App::create_command_buffers() { - this->command_buffers.resize(this->framebuffers.size()); - - VkCommandBufferAllocateInfo allocInfo = {}; - allocInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO; - allocInfo.commandPool = this->command_pool; - allocInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY; - allocInfo.commandBufferCount = (uint32_t)this->command_buffers.size(); - - auto result = init.disp.allocateCommandBuffers(&allocInfo, this->command_buffers.data()); - VERIFY_VK_RESULT("failed to allocate command buffers", result); - - for (size_t i = 0; i < this->command_buffers.size(); i++) { - VkCommandBufferBeginInfo begin_info = {}; - begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; - - result = init.disp.beginCommandBuffer(this->command_buffers[i], &begin_info); - VERIFY_VK_RESULT("failed to create command buffer", result); - - VkRenderPassBeginInfo render_pass_info = {}; - render_pass_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO; - render_pass_info.renderPass = this->render_pass; - render_pass_info.framebuffer = this->framebuffers[i]; - render_pass_info.renderArea.offset = { 0, 0 }; - render_pass_info.renderArea.extent = init.swapchain.extent; - VkClearValue clearColor{ { { 0.0f, 0.0f, 0.0f, 1.0f } } }; - render_pass_info.clearValueCount = 1; - render_pass_info.pClearValues = &clearColor; - - VkViewport viewport = {}; - viewport.x = 0.0f; - viewport.y = 0.0f; - viewport.width = (float)init.swapchain.extent.width; - viewport.height = (float)init.swapchain.extent.height; - viewport.minDepth = 0.0f; - viewport.maxDepth = 1.0f; - - VkRect2D scissor = {}; - scissor.offset = { 0, 0 }; - scissor.extent = init.swapchain.extent; - - init.disp.cmdSetViewport(this->command_buffers[i], 0, 1, &viewport); - init.disp.cmdSetScissor(this->command_buffers[i], 0, 1, &scissor); - - init.disp.cmdBeginRenderPass(this->command_buffers[i], &render_pass_info, VK_SUBPASS_CONTENTS_INLINE); - - init.disp.cmdBindPipeline(this->command_buffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, this->graphics_pipeline); - - init.disp.cmdDraw(this->command_buffers[i], 3, 1, 0, 0); - - init.disp.cmdEndRenderPass(this->command_buffers[i]); - - result = init.disp.endCommandBuffer(this->command_buffers[i]); - VERIFY_VK_RESULT("failed to end command buffer", result); - } -} - -void App::recreate_swapchain() { +void App::recreate_swapchain() +{ init.disp.deviceWaitIdle(); - init.disp.destroyCommandPool(this->command_pool, nullptr); - - for (auto framebuffer : this->framebuffers) { + for (auto framebuffer : this->framebuffers) + { init.disp.destroyFramebuffer(framebuffer, nullptr); } TestAppBase::recreate_swapchain(false); create_framebuffers(); - - auto queue_family_index = init.device.get_queue_index(gfxrecon::test::QueueType::graphics); - if (!queue_family_index) throw std::runtime_error("could not find graphics queue"); - this->command_pool = gfxrecon::test::create_command_pool(init.disp, *queue_family_index); - - create_command_buffers(); } const int NUM_FRAMES = 10; @@ -329,56 +261,160 @@ bool 
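// The pre-recorded command buffers and the single shared command pool are gone: frame() below
// resets one command pool per frame in flight and records a fresh command buffer every frame,
// which is why recreate_swapchain() no longer has to destroy and rebuild any pools. Roughly
// what the per-frame pools amount to (a sketch; gfxrecon::test::create_command_pool is assumed
// to do something equivalent, and `device` / `graphics_queue_family` are placeholders):

VkCommandPool pools[MAX_FRAMES_IN_FLIGHT];
VkCommandPoolCreateInfo pool_info = {};
pool_info.sType            = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
pool_info.queueFamilyIndex = graphics_queue_family;
for (auto& pool : pools)
{
    vkCreateCommandPool(device, &pool_info, nullptr, &pool);
}

// Each frame, after waiting on that frame's fence, the whole pool is recycled at once:
vkResetCommandPool(device, pools[current_frame], 0);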
App::frame(const int frame_num) init.disp.waitForFences(1, &this->sync.in_flight_fences[this->current_frame], VK_TRUE, UINT64_MAX); uint32_t image_index = 0; - VkResult result = init.disp.acquireNextImageKHR( + VkResult result = init.disp.acquireNextImageKHR( init.swapchain, UINT64_MAX, this->sync.available_semaphores[this->current_frame], VK_NULL_HANDLE, &image_index); - if (result == VK_ERROR_OUT_OF_DATE_KHR) { + if (result == VK_ERROR_OUT_OF_DATE_KHR) + { recreate_swapchain(); return IS_RUNNING(frame_num); - } else if (result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR) { + } + else if (result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR) + { throw gfxrecon::test::vulkan_exception("failed to acquire next image", result); } - if (this->sync.image_in_flight[image_index] != VK_NULL_HANDLE) { + if (this->sync.image_in_flight[image_index] != VK_NULL_HANDLE) + { init.disp.waitForFences(1, &this->sync.image_in_flight[image_index], VK_TRUE, UINT64_MAX); } this->sync.image_in_flight[image_index] = this->sync.in_flight_fences[this->current_frame]; + init.disp.resetCommandPool(this->command_pools[current_frame], 0); + VkCommandBufferAllocateInfo allocate_info = {}; + allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO; + allocate_info.commandBufferCount = 1; + allocate_info.commandPool = this->command_pools[current_frame]; + VkCommandBuffer command_buffer; + result = init.disp.allocateCommandBuffers(&allocate_info, &command_buffer); + VERIFY_VK_RESULT("failed to allocate command buffer", result); + + { + VkCommandBufferBeginInfo begin_info = {}; + begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; + result = init.disp.beginCommandBuffer(command_buffer, &begin_info); + VERIFY_VK_RESULT("failed to create command buffer", result); + + { + VkImageMemoryBarrier image_barrier = {}; + image_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; + image_barrier.image = init.swapchain_images[image_index]; + image_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED; + image_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; + image_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + image_barrier.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS; + image_barrier.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS; + image_barrier.srcAccessMask = VK_ACCESS_NONE; + image_barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; + init.disp.cmdPipelineBarrier(command_buffer, + VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, + VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, + 0, + 0, + nullptr, + 0, + nullptr, + 1, + &image_barrier); + } + + VkRenderPassBeginInfo render_pass_info = {}; + render_pass_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO; + render_pass_info.renderPass = this->render_pass; + render_pass_info.framebuffer = this->framebuffers[image_index]; + render_pass_info.renderArea.offset = { 0, 0 }; + render_pass_info.renderArea.extent = init.swapchain.extent; + VkClearValue clearColor{ { { 0.0f, 0.0f, 0.0f, 1.0f } } }; + render_pass_info.clearValueCount = 1; + render_pass_info.pClearValues = &clearColor; + + VkViewport viewport = {}; + viewport.x = 0.0f; + viewport.y = 0.0f; + viewport.width = (float)init.swapchain.extent.width; + viewport.height = (float)init.swapchain.extent.height; + viewport.minDepth = 0.0f; + viewport.maxDepth = 1.0f; + + VkRect2D scissor = {}; + scissor.offset = { 0, 0 }; + scissor.extent = init.swapchain.extent; + + init.disp.cmdSetViewport(command_buffer, 0, 1, &viewport); + init.disp.cmdSetScissor(command_buffer, 0, 1, 
&scissor); + + init.disp.cmdBeginRenderPass(command_buffer, &render_pass_info, VK_SUBPASS_CONTENTS_INLINE); + + init.disp.cmdBindPipeline(command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, this->graphics_pipeline); + + init.disp.cmdDraw(command_buffer, 3, 1, 0, 0); + + init.disp.cmdEndRenderPass(command_buffer); + + { + VkImageMemoryBarrier image_barrier = {}; + image_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; + image_barrier.image = init.swapchain_images[image_index]; + image_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; + image_barrier.newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR; + image_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + image_barrier.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS; + image_barrier.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS; + image_barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; + image_barrier.dstAccessMask = VK_ACCESS_NONE; + init.disp.cmdPipelineBarrier(command_buffer, + VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, + VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, + 0, + 0, + nullptr, + 0, + nullptr, + 1, + &image_barrier); + } + + result = init.disp.endCommandBuffer(command_buffer); + VERIFY_VK_RESULT("failed to end command buffer", result); + } + VkSubmitInfo submitInfo = {}; - submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; + submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; - VkSemaphore wait_semaphores[] = { this->sync.available_semaphores[this->current_frame] }; - VkPipelineStageFlags wait_stages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT }; - submitInfo.waitSemaphoreCount = 1; - submitInfo.pWaitSemaphores = wait_semaphores; - submitInfo.pWaitDstStageMask = wait_stages; + VkSemaphore wait_semaphores[] = { this->sync.available_semaphores[this->current_frame] }; + VkPipelineStageFlags wait_stages[] = { VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT }; + submitInfo.waitSemaphoreCount = 1; + submitInfo.pWaitSemaphores = wait_semaphores; + submitInfo.pWaitDstStageMask = wait_stages; submitInfo.commandBufferCount = 1; - submitInfo.pCommandBuffers = &this->command_buffers[this->current_frame]; + submitInfo.pCommandBuffers = &command_buffer; VkSemaphore signal_semaphores[] = { this->sync.finished_semaphore[this->current_frame] }; submitInfo.signalSemaphoreCount = 1; - submitInfo.pSignalSemaphores = signal_semaphores; + submitInfo.pSignalSemaphores = signal_semaphores; init.disp.resetFences(1, &this->sync.in_flight_fences[this->current_frame]); - result = init.disp.queueSubmit(this->graphics_queue, 1, &submitInfo, this->sync.in_flight_fences[this->current_frame]); + result = + init.disp.queueSubmit(this->graphics_queue, 1, &submitInfo, this->sync.in_flight_fences[this->current_frame]); VERIFY_VK_RESULT("failed to submit queue", result); VkPresentInfoKHR present_info = {}; - present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; + present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; present_info.waitSemaphoreCount = 1; - present_info.pWaitSemaphores = signal_semaphores; + present_info.pWaitSemaphores = signal_semaphores; VkSwapchainKHR swapChains[] = { init.swapchain }; present_info.swapchainCount = 1; - present_info.pSwapchains = swapChains; + present_info.pSwapchains = swapChains; present_info.pImageIndices = &image_index; result = init.disp.queuePresentKHR(this->present_queue, &present_info); - if (result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR) { + if (result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR) + { recreate_swapchain(); return frame_num >= 
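// The submit now waits on the acquire semaphore at VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT instead of
// at color-attachment output. Since the first thing recorded each frame is the layout transition
// of the acquired image, the whole batch presumably must not start until the image is available,
// which is what a top-of-pipe wait expresses (sketch; `submit_info` / `acquire_semaphore` stand
// in for the locals used above):

VkPipelineStageFlags wait_stages[] = { VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT };
submit_info.waitSemaphoreCount = 1;
submit_info.pWaitSemaphores    = &acquire_semaphore; // signalled by vkAcquireNextImageKHR
submit_info.pWaitDstStageMask  = wait_stages;        // no stage of this batch runs before the wait completes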
NUM_FRAMES; } @@ -389,16 +425,22 @@ bool App::frame(const int frame_num) return IS_RUNNING(frame_num); } -void App::cleanup() { - for (size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { +void App::cleanup() +{ + for (size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) + { init.disp.destroySemaphore(this->sync.finished_semaphore[i], nullptr); init.disp.destroySemaphore(this->sync.available_semaphores[i], nullptr); init.disp.destroyFence(this->sync.in_flight_fences[i], nullptr); } - init.disp.destroyCommandPool(this->command_pool, nullptr); + for (auto command_pool : command_pools) + { + init.disp.destroyCommandPool(command_pool, nullptr); + } - for (auto framebuffer : this->framebuffers) { + for (auto framebuffer : this->framebuffers) + { init.disp.destroyFramebuffer(framebuffer, nullptr); } @@ -427,9 +469,9 @@ void App::setup() auto queue_family_index = init.device.get_queue_index(gfxrecon::test::QueueType::graphics); if (!queue_family_index) throw std::runtime_error("could not find graphics queue"); - this->command_pool = gfxrecon::test::create_command_pool(init.disp, *queue_family_index); - - create_command_buffers(); + for (auto& command_pool : command_pools) { + command_pool = gfxrecon::test::create_command_pool(init.disp, *queue_family_index); + } this->sync = gfxrecon::test::create_sync_objects(init.swapchain, init.disp, MAX_FRAMES_IN_FLIGHT); } @@ -440,12 +482,16 @@ GFXRECON_END_NAMESPACE(test_app) GFXRECON_END_NAMESPACE(gfxrecon) -int main(int argc, char *argv[]) { - try { +int main(int argc, char* argv[]) +{ + try + { gfxrecon::test_app::triangle::App app{}; app.run("triangle"); return 0; - } catch (std::exception e) { + } + catch (std::exception e) + { std::cout << e.what() << std::endl; return -1; } From 62c3b6145d193ff1e474490a91b8ce02aa900c11 Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Sun, 27 Oct 2024 15:03:11 -0400 Subject: [PATCH 23/70] Clean validation for multisample --- test/test_apps/multisample-depth/app.cpp | 662 ++++++++++++----------- 1 file changed, 336 insertions(+), 326 deletions(-) diff --git a/test/test_apps/multisample-depth/app.cpp b/test/test_apps/multisample-depth/app.cpp index 63cf42c57e..bd5f87fbbd 100644 --- a/test/test_apps/multisample-depth/app.cpp +++ b/test/test_apps/multisample-depth/app.cpp @@ -38,34 +38,35 @@ GFXRECON_BEGIN_NAMESPACE(multisample_depth) const int MAX_FRAMES_IN_FLIGHT = 2; -class App : public gfxrecon::test::TestAppBase { +class App : public gfxrecon::test::TestAppBase +{ public: App() = default; + private: VkQueue graphics_queue; VkQueue present_queue; - std::vector depth_images; + std::vector depth_images; std::vector depth_image_allocations; - std::vector depth_image_views; + std::vector depth_image_views; - std::vector render_targets; + std::vector render_targets; std::vector render_target_allocations; - std::vector render_target_views; + std::vector render_target_views; std::vector framebuffers; - VkRenderPass render_pass; + VkRenderPass render_pass; VkPipelineLayout pipeline_layout; - VkPipeline graphics_pipeline; + VkPipeline graphics_pipeline; - VkCommandPool command_pool; - std::vector command_buffers; + VkCommandPool command_pools[MAX_FRAMES_IN_FLIGHT]; size_t current_frame = 0; gfxrecon::test::Sync sync; - VmaAllocator allocator; + VmaAllocator allocator; void create_allocator(); void create_render_targets(); @@ -73,29 +74,29 @@ class App : public gfxrecon::test::TestAppBase { void create_render_pass(); void create_graphics_pipeline(); void create_framebuffers(); - void create_command_buffers(); void recreate_swapchain(); void 
cleanup() override; bool frame(const int frame_num) override; void setup() override; - void configure_swapchain_builder(gfxrecon::test::SwapchainBuilder &swapchain_builder) override; + void configure_swapchain_builder(gfxrecon::test::SwapchainBuilder& swapchain_builder) override; }; -void App::configure_swapchain_builder(gfxrecon::test::SwapchainBuilder &swapchain_builder) { +void App::configure_swapchain_builder(gfxrecon::test::SwapchainBuilder& swapchain_builder) +{ swapchain_builder.add_image_usage_flags(VK_IMAGE_USAGE_TRANSFER_DST_BIT); } void App::create_allocator() { - VmaVulkanFunctions vulkan_functions = {}; + VmaVulkanFunctions vulkan_functions = {}; vulkan_functions.vkGetInstanceProcAddr = init.inst_disp.fp_vkGetInstanceProcAddr; - vulkan_functions.vkGetDeviceProcAddr = init.disp.fp_vkGetDeviceProcAddr; + vulkan_functions.vkGetDeviceProcAddr = init.disp.fp_vkGetDeviceProcAddr; VmaAllocatorCreateInfo allocator_create_info = {}; - allocator_create_info.physicalDevice = init.physical_device; - allocator_create_info.device = init.device; - allocator_create_info.instance = init.instance; - allocator_create_info.pVulkanFunctions = &vulkan_functions; + allocator_create_info.physicalDevice = init.physical_device; + allocator_create_info.device = init.device; + allocator_create_info.instance = init.instance; + allocator_create_info.pVulkanFunctions = &vulkan_functions; vmaCreateAllocator(&allocator_create_info, &this->allocator); } @@ -105,33 +106,39 @@ void App::create_render_targets() render_target_allocations.resize(init.swapchain_image_views.size()); render_target_views.resize(init.swapchain_image_views.size()); - for (size_t i = 0; i < init.swapchain_image_views.size(); i++) { + for (size_t i = 0; i < init.swapchain_image_views.size(); i++) + { VkExtent3D extent = {}; - VkResult result; + VkResult result; VkImageCreateInfo image_create_info = {}; - image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; - image_create_info.imageType = VK_IMAGE_TYPE_2D; - image_create_info.format = init.swapchain.image_format; - image_create_info.extent.height = init.swapchain.extent.height; - image_create_info.extent.width = init.swapchain.extent.width; - image_create_info.extent.depth = 1; - image_create_info.mipLevels = 1; - image_create_info.arrayLayers = 1; - image_create_info.samples = VK_SAMPLE_COUNT_2_BIT; - image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT; + image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; + image_create_info.imageType = VK_IMAGE_TYPE_2D; + image_create_info.format = init.swapchain.image_format; + image_create_info.extent.height = init.swapchain.extent.height; + image_create_info.extent.width = init.swapchain.extent.width; + image_create_info.extent.depth = 1; + image_create_info.mipLevels = 1; + image_create_info.arrayLayers = 1; + image_create_info.samples = VK_SAMPLE_COUNT_2_BIT; + image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT; VmaAllocationCreateInfo allocation_info = {}; - allocation_info.usage = VMA_MEMORY_USAGE_AUTO; - - result = vmaCreateImage(allocator, &image_create_info, &allocation_info, &render_targets[i], &render_target_allocations[i], nullptr); + allocation_info.usage = VMA_MEMORY_USAGE_AUTO; + + result = vmaCreateImage(allocator, + &image_create_info, + &allocation_info, + &render_targets[i], + &render_target_allocations[i], + nullptr); VERIFY_VK_RESULT("failed to create render target", result); - VkImageViewCreateInfo image_view_create_info = 
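// create_allocator() hands VMA only vkGetInstanceProcAddr / vkGetDeviceProcAddr and lets the
// allocator fetch the remaining entry points itself; this assumes the test framework builds
// vk_mem_alloc with dynamic function loading enabled (e.g. VMA_DYNAMIC_VULKAN_FUNCTIONS=1).
// Each vmaCreateImage() in this file is expected to be paired with a vmaDestroyImage() at
// teardown. A minimal sketch of that lifetime, with placeholder names:

VkImage       image      = VK_NULL_HANDLE;
VmaAllocation allocation = VK_NULL_HANDLE;
VmaAllocationCreateInfo alloc_create_info = {};
alloc_create_info.usage = VMA_MEMORY_USAGE_AUTO; // let VMA choose the memory type
VkResult res = vmaCreateImage(allocator, &image_create_info, &alloc_create_info,
                              &image, &allocation, nullptr);
// ... record frames that use the image ...
vmaDestroyImage(allocator, image, allocation); // frees the image and its backing memory together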
{}; - image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; - image_view_create_info.image = this->render_targets[i]; - image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D; - image_view_create_info.format = image_create_info.format; + VkImageViewCreateInfo image_view_create_info = {}; + image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; + image_view_create_info.image = this->render_targets[i]; + image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D; + image_view_create_info.format = image_create_info.format; image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; image_view_create_info.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS; image_view_create_info.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS; @@ -146,33 +153,35 @@ void App::create_depth_buffers() depth_image_allocations.resize(init.swapchain_image_views.size()); depth_image_views.resize(init.swapchain_image_views.size()); - for (size_t i = 0; i < init.swapchain_image_views.size(); i++) { + for (size_t i = 0; i < init.swapchain_image_views.size(); i++) + { VkExtent3D extent = {}; - VkResult result; + VkResult result; VkImageCreateInfo image_create_info = {}; - image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; - image_create_info.imageType = VK_IMAGE_TYPE_2D; - image_create_info.format = VK_FORMAT_D16_UNORM; - image_create_info.extent.height = init.swapchain.extent.height; - image_create_info.extent.width = init.swapchain.extent.width; - image_create_info.extent.depth = 1; - image_create_info.mipLevels = 1; - image_create_info.arrayLayers = 1; - image_create_info.samples = VK_SAMPLE_COUNT_2_BIT; - image_create_info.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT; + image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; + image_create_info.imageType = VK_IMAGE_TYPE_2D; + image_create_info.format = VK_FORMAT_D16_UNORM; + image_create_info.extent.height = init.swapchain.extent.height; + image_create_info.extent.width = init.swapchain.extent.width; + image_create_info.extent.depth = 1; + image_create_info.mipLevels = 1; + image_create_info.arrayLayers = 1; + image_create_info.samples = VK_SAMPLE_COUNT_2_BIT; + image_create_info.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT; VmaAllocationCreateInfo allocation_info = {}; - allocation_info.usage = VMA_MEMORY_USAGE_AUTO; + allocation_info.usage = VMA_MEMORY_USAGE_AUTO; - result = vmaCreateImage(allocator, &image_create_info, &allocation_info, &depth_images[i], &depth_image_allocations[i], nullptr); + result = vmaCreateImage( + allocator, &image_create_info, &allocation_info, &depth_images[i], &depth_image_allocations[i], nullptr); VERIFY_VK_RESULT("failed to create depth buffer image", result); - VkImageViewCreateInfo image_view_create_info = {}; - image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; - image_view_create_info.image = this->depth_images[i]; - image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D; - image_view_create_info.format = image_create_info.format; + VkImageViewCreateInfo image_view_create_info = {}; + image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; + image_view_create_info.image = this->depth_images[i]; + image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D; + image_view_create_info.format = image_create_info.format; image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT; image_view_create_info.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS; 
image_view_create_info.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS; @@ -181,126 +190,115 @@ void App::create_depth_buffers() } } -void App::create_render_pass() { +void App::create_render_pass() +{ VkAttachmentDescription attachments[2]; - attachments[0] = {}; - attachments[0].format = init.swapchain.image_format; - attachments[0].samples = VK_SAMPLE_COUNT_2_BIT; - attachments[0].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; - attachments[0].storeOp = VK_ATTACHMENT_STORE_OP_STORE; - attachments[0].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; + attachments[0] = {}; + attachments[0].format = init.swapchain.image_format; + attachments[0].samples = VK_SAMPLE_COUNT_2_BIT; + attachments[0].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; + attachments[0].storeOp = VK_ATTACHMENT_STORE_OP_STORE; + attachments[0].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; attachments[0].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; - attachments[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; - attachments[0].finalLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL; - attachments[1] = {}; - attachments[1].format = VK_FORMAT_D16_UNORM; - attachments[1].samples = VK_SAMPLE_COUNT_2_BIT; - attachments[1].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; - attachments[1].storeOp = VK_ATTACHMENT_STORE_OP_STORE; - attachments[1].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; + attachments[0].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; + attachments[0].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; + attachments[1] = {}; + attachments[1].format = VK_FORMAT_D16_UNORM; + attachments[1].samples = VK_SAMPLE_COUNT_2_BIT; + attachments[1].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; + attachments[1].storeOp = VK_ATTACHMENT_STORE_OP_STORE; + attachments[1].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; attachments[1].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; - attachments[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; - attachments[1].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL; + attachments[1].initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL; + attachments[1].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL; VkAttachmentReference color_attachment_ref = {}; - color_attachment_ref.attachment = 0; - color_attachment_ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; + color_attachment_ref.attachment = 0; + color_attachment_ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; VkAttachmentReference depth_attachment_ref = {}; - depth_attachment_ref.attachment = 1; - depth_attachment_ref.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL; + depth_attachment_ref.attachment = 1; + depth_attachment_ref.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL; - VkSubpassDescription subpass = {}; - subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS; - subpass.colorAttachmentCount = 1; - subpass.pColorAttachments = &color_attachment_ref; + VkSubpassDescription subpass = {}; + subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS; + subpass.colorAttachmentCount = 1; + subpass.pColorAttachments = &color_attachment_ref; subpass.pDepthStencilAttachment = &depth_attachment_ref; - VkSubpassDependency dependency = {}; - dependency.srcSubpass = VK_SUBPASS_EXTERNAL; - dependency.dstSubpass = 0; - dependency.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT; - dependency.srcAccessMask = 0; - dependency.dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | 
VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT; - dependency.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | - VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | - VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | - VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; - VkRenderPassCreateInfo render_pass_info = {}; - render_pass_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO; - render_pass_info.attachmentCount = 2; - render_pass_info.pAttachments = attachments; - render_pass_info.subpassCount = 1; - render_pass_info.pSubpasses = &subpass; - render_pass_info.dependencyCount = 1; - render_pass_info.pDependencies = &dependency; + render_pass_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO; + render_pass_info.attachmentCount = 2; + render_pass_info.pAttachments = attachments; + render_pass_info.subpassCount = 1; + render_pass_info.pSubpasses = &subpass; auto result = init.disp.createRenderPass(&render_pass_info, nullptr, &this->render_pass); VERIFY_VK_RESULT("failed to create render pass", result); } -void App::create_graphics_pipeline() { +void App::create_graphics_pipeline() +{ auto vert_module = gfxrecon::test::readShaderFromFile(init.disp, "vert.spv"); auto frag_module = gfxrecon::test::readShaderFromFile(init.disp, "frag.spv"); VkPipelineShaderStageCreateInfo vert_stage_info = {}; - vert_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; - vert_stage_info.stage = VK_SHADER_STAGE_VERTEX_BIT; - vert_stage_info.module = vert_module; - vert_stage_info.pName = "main"; + vert_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; + vert_stage_info.stage = VK_SHADER_STAGE_VERTEX_BIT; + vert_stage_info.module = vert_module; + vert_stage_info.pName = "main"; VkPipelineShaderStageCreateInfo frag_stage_info = {}; - frag_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; - frag_stage_info.stage = VK_SHADER_STAGE_FRAGMENT_BIT; - frag_stage_info.module = frag_module; - frag_stage_info.pName = "main"; + frag_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; + frag_stage_info.stage = VK_SHADER_STAGE_FRAGMENT_BIT; + frag_stage_info.module = frag_module; + frag_stage_info.pName = "main"; VkPipelineShaderStageCreateInfo shader_stages[] = { vert_stage_info, frag_stage_info }; VkPipelineVertexInputStateCreateInfo vertex_input_info = {}; - vertex_input_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO; - vertex_input_info.vertexBindingDescriptionCount = 0; - vertex_input_info.vertexAttributeDescriptionCount = 0; + vertex_input_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO; + vertex_input_info.vertexBindingDescriptionCount = 0; + vertex_input_info.vertexAttributeDescriptionCount = 0; VkPipelineInputAssemblyStateCreateInfo input_assembly = {}; - input_assembly.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO; - input_assembly.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST; - input_assembly.primitiveRestartEnable = VK_FALSE; + input_assembly.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO; + input_assembly.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST; + input_assembly.primitiveRestartEnable = VK_FALSE; VkViewport viewport = {}; - viewport.x = 0.0f; - viewport.y = 0.0f; - viewport.width = (float)init.swapchain.extent.width; - viewport.height = (float)init.swapchain.extent.height; - viewport.minDepth = 0.0f; - viewport.maxDepth = 1.0f; + viewport.x = 0.0f; + viewport.y = 0.0f; + viewport.width = (float)init.swapchain.extent.width; + viewport.height = 
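// With both attachments now declaring initialLayout == finalLayout and the external dependency
// removed, this render pass performs no layout transitions and no synchronization of its own:
// the 2-sample color target and the D16 depth buffer must already be in
// COLOR_ATTACHMENT_OPTIMAL / DEPTH_STENCIL_ATTACHMENT_OPTIMAL when vkCmdBeginRenderPass is
// recorded, which the barriers at the top of frame() establish. The depth half of that
// transition looks roughly like this (a sketch mirroring what frame() records):

VkImageMemoryBarrier to_depth = {};
to_depth.sType            = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
to_depth.image            = depth_images[image_index];
to_depth.oldLayout        = VK_IMAGE_LAYOUT_UNDEFINED;
to_depth.newLayout        = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
to_depth.srcAccessMask    = VK_ACCESS_NONE;
to_depth.dstAccessMask    = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
                            VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
to_depth.subresourceRange = { VK_IMAGE_ASPECT_DEPTH_BIT, 0, 1, 0, 1 };
init.disp.cmdPipelineBarrier(command_buffer,
                             VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                             VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
                                 VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
                             0, 0, nullptr, 0, nullptr, 1, &to_depth);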
(float)init.swapchain.extent.height; + viewport.minDepth = 0.0f; + viewport.maxDepth = 1.0f; VkRect2D scissor = {}; - scissor.offset = { 0, 0 }; - scissor.extent = init.swapchain.extent; + scissor.offset = { 0, 0 }; + scissor.extent = init.swapchain.extent; VkPipelineViewportStateCreateInfo viewport_state = {}; - viewport_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO; - viewport_state.viewportCount = 1; - viewport_state.pViewports = &viewport; - viewport_state.scissorCount = 1; - viewport_state.pScissors = &scissor; + viewport_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO; + viewport_state.viewportCount = 1; + viewport_state.pViewports = &viewport; + viewport_state.scissorCount = 1; + viewport_state.pScissors = &scissor; VkPipelineRasterizationStateCreateInfo rasterizer = {}; - rasterizer.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO; - rasterizer.depthClampEnable = VK_FALSE; - rasterizer.rasterizerDiscardEnable = VK_FALSE; - rasterizer.polygonMode = VK_POLYGON_MODE_FILL; - rasterizer.lineWidth = 1.0f; - rasterizer.cullMode = VK_CULL_MODE_BACK_BIT; - rasterizer.frontFace = VK_FRONT_FACE_CLOCKWISE; - rasterizer.depthBiasEnable = VK_FALSE; + rasterizer.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO; + rasterizer.depthClampEnable = VK_FALSE; + rasterizer.rasterizerDiscardEnable = VK_FALSE; + rasterizer.polygonMode = VK_POLYGON_MODE_FILL; + rasterizer.lineWidth = 1.0f; + rasterizer.cullMode = VK_CULL_MODE_BACK_BIT; + rasterizer.frontFace = VK_FRONT_FACE_CLOCKWISE; + rasterizer.depthBiasEnable = VK_FALSE; VkPipelineMultisampleStateCreateInfo multisampling = {}; - multisampling.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO; - multisampling.sampleShadingEnable = VK_FALSE; - multisampling.rasterizationSamples = VK_SAMPLE_COUNT_2_BIT; + multisampling.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO; + multisampling.sampleShadingEnable = VK_FALSE; + multisampling.rasterizationSamples = VK_SAMPLE_COUNT_2_BIT; VkPipelineColorBlendAttachmentState colorBlendAttachment = {}; colorBlendAttachment.colorWriteMask = @@ -308,20 +306,20 @@ void App::create_graphics_pipeline() { colorBlendAttachment.blendEnable = VK_FALSE; VkPipelineColorBlendStateCreateInfo color_blending = {}; - color_blending.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO; - color_blending.logicOpEnable = VK_FALSE; - color_blending.logicOp = VK_LOGIC_OP_COPY; - color_blending.attachmentCount = 1; - color_blending.pAttachments = &colorBlendAttachment; - color_blending.blendConstants[0] = 0.0f; - color_blending.blendConstants[1] = 0.0f; - color_blending.blendConstants[2] = 0.0f; - color_blending.blendConstants[3] = 0.0f; + color_blending.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO; + color_blending.logicOpEnable = VK_FALSE; + color_blending.logicOp = VK_LOGIC_OP_COPY; + color_blending.attachmentCount = 1; + color_blending.pAttachments = &colorBlendAttachment; + color_blending.blendConstants[0] = 0.0f; + color_blending.blendConstants[1] = 0.0f; + color_blending.blendConstants[2] = 0.0f; + color_blending.blendConstants[3] = 0.0f; VkPipelineLayoutCreateInfo pipeline_layout_info = {}; - pipeline_layout_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; - pipeline_layout_info.setLayoutCount = 0; - pipeline_layout_info.pushConstantRangeCount = 0; + pipeline_layout_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; + pipeline_layout_info.setLayoutCount = 0; + 
pipeline_layout_info.pushConstantRangeCount = 0; auto result = init.disp.createPipelineLayout(&pipeline_layout_info, nullptr, &this->pipeline_layout); VERIFY_VK_RESULT("failed to create pipeline layout", result); @@ -329,30 +327,30 @@ void App::create_graphics_pipeline() { std::vector dynamic_states = { VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR }; VkPipelineDynamicStateCreateInfo dynamic_info = {}; - dynamic_info.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO; - dynamic_info.dynamicStateCount = static_cast(dynamic_states.size()); - dynamic_info.pDynamicStates = dynamic_states.data(); + dynamic_info.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO; + dynamic_info.dynamicStateCount = static_cast(dynamic_states.size()); + dynamic_info.pDynamicStates = dynamic_states.data(); VkPipelineDepthStencilStateCreateInfo depth_stencil = {}; - depth_stencil.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO; - depth_stencil.depthTestEnable = true; - depth_stencil.depthWriteEnable = true; - depth_stencil.depthCompareOp = VK_COMPARE_OP_LESS; + depth_stencil.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO; + depth_stencil.depthTestEnable = true; + depth_stencil.depthWriteEnable = true; + depth_stencil.depthCompareOp = VK_COMPARE_OP_LESS; VkGraphicsPipelineCreateInfo pipeline_info = {}; - pipeline_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO; - pipeline_info.stageCount = 2; - pipeline_info.pStages = shader_stages; - pipeline_info.pVertexInputState = &vertex_input_info; - pipeline_info.pInputAssemblyState = &input_assembly; - pipeline_info.pViewportState = &viewport_state; - pipeline_info.pRasterizationState = &rasterizer; - pipeline_info.pMultisampleState = &multisampling; - pipeline_info.pColorBlendState = &color_blending; - pipeline_info.pDynamicState = &dynamic_info; - pipeline_info.layout = this->pipeline_layout; - pipeline_info.renderPass = this->render_pass; - pipeline_info.pDepthStencilState = &depth_stencil; + pipeline_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO; + pipeline_info.stageCount = 2; + pipeline_info.pStages = shader_stages; + pipeline_info.pVertexInputState = &vertex_input_info; + pipeline_info.pInputAssemblyState = &input_assembly; + pipeline_info.pViewportState = &viewport_state; + pipeline_info.pRasterizationState = &rasterizer; + pipeline_info.pMultisampleState = &multisampling; + pipeline_info.pColorBlendState = &color_blending; + pipeline_info.pDynamicState = &dynamic_info; + pipeline_info.layout = this->pipeline_layout; + pipeline_info.renderPass = this->render_pass; + pipeline_info.pDepthStencilState = &depth_stencil; result = init.disp.createGraphicsPipelines(VK_NULL_HANDLE, 1, &pipeline_info, nullptr, &this->graphics_pipeline); VERIFY_VK_RESULT("failed to create graphics pipeline", result); @@ -361,143 +359,32 @@ void App::create_graphics_pipeline() { init.disp.destroyShaderModule(vert_module, nullptr); } -void App::create_framebuffers() { +void App::create_framebuffers() +{ this->framebuffers.resize(init.swapchain_image_views.size()); - for (size_t i = 0; i < init.swapchain_image_views.size(); i++) { + for (size_t i = 0; i < init.swapchain_image_views.size(); i++) + { VkImageView attachments[] = { render_target_views[i], depth_image_views[i] }; VkFramebufferCreateInfo framebuffer_info = {}; - framebuffer_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO; - framebuffer_info.renderPass = this->render_pass; - framebuffer_info.attachmentCount = 2; - 
framebuffer_info.pAttachments = attachments; - framebuffer_info.width = init.swapchain.extent.width; - framebuffer_info.height = init.swapchain.extent.height; - framebuffer_info.layers = 1; + framebuffer_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO; + framebuffer_info.renderPass = this->render_pass; + framebuffer_info.attachmentCount = 2; + framebuffer_info.pAttachments = attachments; + framebuffer_info.width = init.swapchain.extent.width; + framebuffer_info.height = init.swapchain.extent.height; + framebuffer_info.layers = 1; auto result = init.disp.createFramebuffer(&framebuffer_info, nullptr, &this->framebuffers[i]); VERIFY_VK_RESULT("failed to create framebuffer", result); } } -void App::create_command_buffers() { - this->command_buffers.resize(MAX_FRAMES_IN_FLIGHT); - - VkCommandBufferAllocateInfo allocInfo = {}; - allocInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO; - allocInfo.commandPool = this->command_pool; - allocInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY; - allocInfo.commandBufferCount = (uint32_t)this->command_buffers.size(); - - auto result = init.disp.allocateCommandBuffers(&allocInfo, this->command_buffers.data()); - VERIFY_VK_RESULT("failed to allocate command buffers", result); - - for (size_t i = 0; i < this->command_buffers.size(); i++) { - VkCommandBufferBeginInfo begin_info = {}; - begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; - - result = init.disp.beginCommandBuffer(this->command_buffers[i], &begin_info); - VERIFY_VK_RESULT("failed to create command buffer", result); - - VkViewport viewport = {}; - viewport.x = 0.0f; - viewport.y = 0.0f; - viewport.width = (float)init.swapchain.extent.width; - viewport.height = (float)init.swapchain.extent.height; - viewport.minDepth = 0.0f; - viewport.maxDepth = 1.0f; - - VkRect2D scissor = {}; - scissor.offset = { 0, 0 }; - scissor.extent = init.swapchain.extent; - - init.disp.cmdSetViewport(this->command_buffers[i], 0, 1, &viewport); - init.disp.cmdSetScissor(this->command_buffers[i], 0, 1, &scissor); - - VkRenderPassBeginInfo render_pass_info = {}; - render_pass_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO; - render_pass_info.renderPass = this->render_pass; - render_pass_info.framebuffer = this->framebuffers[i]; - render_pass_info.renderArea.offset = { 0, 0 }; - render_pass_info.renderArea.extent = init.swapchain.extent; - VkClearValue clearColor[2]; - clearColor[0] = { 0.0f, 0.0f, 0.0f, 1.0f }; - clearColor[1] = { 1.0f, 1 }; - render_pass_info.clearValueCount = 2; - render_pass_info.pClearValues = clearColor; - - init.disp.cmdBeginRenderPass(this->command_buffers[i], &render_pass_info, VK_SUBPASS_CONTENTS_INLINE); - - init.disp.cmdBindPipeline(this->command_buffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, this->graphics_pipeline); - - init.disp.cmdDraw(this->command_buffers[i], 3, 1, 0, 0); - - init.disp.cmdEndRenderPass(this->command_buffers[i]); - - VkImageMemoryBarrier image_barrier = {}; - image_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; - image_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; - image_barrier.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS; - image_barrier.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS; - image_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT; - image_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL; - image_barrier.image = init.swapchain_images[i]; - init.disp.cmdPipelineBarrier( - command_buffers[i], - VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, - VK_PIPELINE_STAGE_TRANSFER_BIT, - 0, - 0, - 
nullptr, - 0, - nullptr, - 1, - &image_barrier - ); - - VkImageResolve region = {}; - region.extent.width = init.swapchain.extent.width; - region.extent.height = init.swapchain.extent.height; - region.extent.depth = 1; - region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; - region.srcSubresource.layerCount = 1; - region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; - region.dstSubresource.layerCount = 1; - init.disp.cmdResolveImage(command_buffers[i], render_targets[i], VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, init.swapchain_images[i], VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion); - - image_barrier = {}; - image_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; - image_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; - image_barrier.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS; - image_barrier.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS; - image_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT; - image_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL; - image_barrier.newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR; - image_barrier.image = init.swapchain_images[i]; - init.disp.cmdPipelineBarrier( - command_buffers[i], - VK_PIPELINE_STAGE_TRANSFER_BIT, - VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, - 0, - 0, - nullptr, - 0, - nullptr, - 1, - &image_barrier - ); - - result = init.disp.endCommandBuffer(this->command_buffers[i]); - VERIFY_VK_RESULT("failed to end command buffer", result); - } -} - -void App::recreate_swapchain() { +void App::recreate_swapchain() +{ init.disp.deviceWaitIdle(); - init.disp.destroyCommandPool(this->command_pool, nullptr); - for (size_t i = 0; i < init.swapchain_image_views.size(); i++) { init.disp.destroyFramebuffer(framebuffers[i], nullptr); @@ -512,12 +399,6 @@ void App::recreate_swapchain() { create_render_targets(); create_depth_buffers(); create_framebuffers(); - - auto queue_family_index = init.device.get_queue_index(gfxrecon::test::QueueType::graphics); - if (!queue_family_index) throw std::runtime_error("could not find graphics queue"); - this->command_pool = gfxrecon::test::create_command_pool(init.disp, *queue_family_index); - - create_command_buffers(); } const int NUM_FRAMES = 10; @@ -528,56 +409,178 @@ bool App::frame(const int frame_num) init.disp.waitForFences(1, &this->sync.in_flight_fences[this->current_frame], VK_TRUE, UINT64_MAX); uint32_t image_index = 0; - VkResult result = init.disp.acquireNextImageKHR( + VkResult result = init.disp.acquireNextImageKHR( init.swapchain, UINT64_MAX, this->sync.available_semaphores[this->current_frame], VK_NULL_HANDLE, &image_index); - if (result == VK_ERROR_OUT_OF_DATE_KHR) { + if (result == VK_ERROR_OUT_OF_DATE_KHR) + { recreate_swapchain(); return IS_RUNNING(frame_num); - } else if (result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR) { + } + else if (result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR) + { throw gfxrecon::test::vulkan_exception("failed to acquire next image", result); } - if (this->sync.image_in_flight[image_index] != VK_NULL_HANDLE) { + if (this->sync.image_in_flight[image_index] != VK_NULL_HANDLE) + { init.disp.waitForFences(1, &this->sync.image_in_flight[image_index], VK_TRUE, UINT64_MAX); } this->sync.image_in_flight[image_index] = this->sync.in_flight_fences[this->current_frame]; + init.disp.resetCommandPool(this->command_pools[current_frame], 0); + VkCommandBufferAllocateInfo allocate_info = {}; + allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO; + allocate_info.commandBufferCount = 1; + 
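// The command buffers removed above were also where the 2-sample color target got resolved into
// the single-sample swapchain image: vkCmdResolveImage between a pair of transfer-layout
// barriers, which is why the render targets are created with VK_IMAGE_USAGE_TRANSFER_SRC_BIT and
// the swapchain builder adds VK_IMAGE_USAGE_TRANSFER_DST_BIT. The core of that resolve, for
// reference (assumes the source is in TRANSFER_SRC_OPTIMAL and the destination in
// TRANSFER_DST_OPTIMAL at that point in the command buffer):

VkImageResolve region = {};
region.extent                    = { init.swapchain.extent.width, init.swapchain.extent.height, 1 };
region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
region.srcSubresource.layerCount = 1;
region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
region.dstSubresource.layerCount = 1;
init.disp.cmdResolveImage(command_buffer,
                          render_targets[image_index], VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                          init.swapchain_images[image_index], VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                          1, &region);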
allocate_info.commandPool = this->command_pools[current_frame]; + VkCommandBuffer command_buffer; + result = init.disp.allocateCommandBuffers(&allocate_info, &command_buffer); + VERIFY_VK_RESULT("failed to allocate command buffer", result); + + { + VkCommandBufferBeginInfo begin_info = {}; + begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; + result = init.disp.beginCommandBuffer(command_buffer, &begin_info); + VERIFY_VK_RESULT("failed to create command buffer", result); + + { + VkImageMemoryBarrier image_barriers[2]; + image_barriers[0] = {}; + image_barriers[0].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; + image_barriers[0].image = render_targets[image_index]; + image_barriers[0].oldLayout = VK_IMAGE_LAYOUT_UNDEFINED; + image_barriers[0].newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; + image_barriers[0].subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + image_barriers[0].subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS; + image_barriers[0].subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS; + image_barriers[0].srcAccessMask = VK_ACCESS_NONE; + image_barriers[0].dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; + image_barriers[1] = {}; + image_barriers[1].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; + image_barriers[1].image = depth_images[image_index]; + image_barriers[1].oldLayout = VK_IMAGE_LAYOUT_UNDEFINED; + image_barriers[1].newLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL; + image_barriers[1].subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT; + image_barriers[1].subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS; + image_barriers[1].subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS; + image_barriers[1].srcAccessMask = VK_ACCESS_NONE; + image_barriers[1].dstAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT|VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; + init.disp.cmdPipelineBarrier(command_buffer, + VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, + VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, + 0, + 0, + nullptr, + 0, + nullptr, + 2, + image_barriers); + } + + VkRenderPassBeginInfo render_pass_info = {}; + render_pass_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO; + render_pass_info.renderPass = this->render_pass; + render_pass_info.framebuffer = this->framebuffers[image_index]; + render_pass_info.renderArea.offset = { 0, 0 }; + render_pass_info.renderArea.extent = init.swapchain.extent; + VkClearValue clearColors[] = { + { { { 0.0f, 0.0f, 0.0f, 1.0f } } }, + { { 1.0f } } + }; + render_pass_info.clearValueCount = 2; + render_pass_info.pClearValues = clearColors; + + VkViewport viewport = {}; + viewport.x = 0.0f; + viewport.y = 0.0f; + viewport.width = (float)init.swapchain.extent.width; + viewport.height = (float)init.swapchain.extent.height; + viewport.minDepth = 0.0f; + viewport.maxDepth = 1.0f; + + VkRect2D scissor = {}; + scissor.offset = { 0, 0 }; + scissor.extent = init.swapchain.extent; + + init.disp.cmdSetViewport(command_buffer, 0, 1, &viewport); + init.disp.cmdSetScissor(command_buffer, 0, 1, &scissor); + + init.disp.cmdBeginRenderPass(command_buffer, &render_pass_info, VK_SUBPASS_CONTENTS_INLINE); + + init.disp.cmdBindPipeline(command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, this->graphics_pipeline); + + init.disp.cmdDraw(command_buffer, 3, 1, 0, 0); + + init.disp.cmdEndRenderPass(command_buffer); + + { + VkMemoryBarrier memory_barrier = {}; + memory_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER; + 
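// VkClearValue is a union, so the second element of clearColors above ends up being read as
// VkClearDepthStencilValue{ depth = 1.0f, stencil = 0 } when it reaches the depth attachment.
// Writing it out explicitly makes the intent clearer:

VkClearValue depth_clear         = {};
depth_clear.depthStencil.depth   = 1.0f; // clear to the far plane
depth_clear.depthStencil.stencil = 0;    // D16_UNORM has no stencil aspect anyway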
memory_barrier.srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; + memory_barrier.dstAccessMask = VK_ACCESS_NONE; + VkImageMemoryBarrier image_barrier = {}; + image_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; + image_barrier.image = init.swapchain_images[image_index]; + image_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED; + image_barrier.newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR; + image_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + image_barrier.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS; + image_barrier.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS; + image_barrier.srcAccessMask = VK_ACCESS_NONE; + image_barrier.dstAccessMask = VK_ACCESS_NONE; + init.disp.cmdPipelineBarrier(command_buffer, + VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT, + VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, + 0, + 1, + &memory_barrier, + 0, + nullptr, + 1, + &image_barrier); + } + + result = init.disp.endCommandBuffer(command_buffer); + VERIFY_VK_RESULT("failed to end command buffer", result); + } + VkSubmitInfo submitInfo = {}; - submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; + submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; - VkSemaphore wait_semaphores[] = { this->sync.available_semaphores[this->current_frame] }; - VkPipelineStageFlags wait_stages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT }; - submitInfo.waitSemaphoreCount = 1; - submitInfo.pWaitSemaphores = wait_semaphores; - submitInfo.pWaitDstStageMask = wait_stages; + VkSemaphore wait_semaphores[] = { this->sync.available_semaphores[this->current_frame] }; + VkPipelineStageFlags wait_stages[] = { VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT }; + submitInfo.waitSemaphoreCount = 1; + submitInfo.pWaitSemaphores = wait_semaphores; + submitInfo.pWaitDstStageMask = wait_stages; submitInfo.commandBufferCount = 1; - submitInfo.pCommandBuffers = &this->command_buffers[this->current_frame]; + submitInfo.pCommandBuffers = &command_buffer; VkSemaphore signal_semaphores[] = { this->sync.finished_semaphore[this->current_frame] }; submitInfo.signalSemaphoreCount = 1; - submitInfo.pSignalSemaphores = signal_semaphores; + submitInfo.pSignalSemaphores = signal_semaphores; init.disp.resetFences(1, &this->sync.in_flight_fences[this->current_frame]); - result = init.disp.queueSubmit(this->graphics_queue, 1, &submitInfo, this->sync.in_flight_fences[this->current_frame]); + result = + init.disp.queueSubmit(this->graphics_queue, 1, &submitInfo, this->sync.in_flight_fences[this->current_frame]); VERIFY_VK_RESULT("failed to submit queue", result); VkPresentInfoKHR present_info = {}; - present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; + present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; present_info.waitSemaphoreCount = 1; - present_info.pWaitSemaphores = signal_semaphores; + present_info.pWaitSemaphores = signal_semaphores; VkSwapchainKHR swapChains[] = { init.swapchain }; present_info.swapchainCount = 1; - present_info.pSwapchains = swapChains; + present_info.pSwapchains = swapChains; present_info.pImageIndices = &image_index; result = init.disp.queuePresentKHR(this->present_queue, &present_info); - if (result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR) { + if (result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR) + { recreate_swapchain(); return frame_num >= NUM_FRAMES; } @@ -597,7 +600,10 @@ void 
App::cleanup() init.disp.destroyFence(this->sync.in_flight_fences[i], nullptr); } - init.disp.destroyCommandPool(this->command_pool, nullptr); + for (auto command_pool : command_pools) + { + init.disp.destroyCommandPool(command_pool, nullptr); + } for (size_t i = 0; i < init.swapchain_image_views.size(); i++) { @@ -640,9 +646,9 @@ void App::setup() auto queue_family_index = init.device.get_queue_index(gfxrecon::test::QueueType::graphics); if (!queue_family_index) throw std::runtime_error("could not find graphics queue"); - this->command_pool = gfxrecon::test::create_command_pool(init.disp, *queue_family_index); - - create_command_buffers(); + for (auto& command_pool : command_pools) { + command_pool = gfxrecon::test::create_command_pool(init.disp, *queue_family_index); + } this->sync = gfxrecon::test::create_sync_objects(init.swapchain, init.disp, MAX_FRAMES_IN_FLIGHT); } @@ -653,12 +659,16 @@ GFXRECON_END_NAMESPACE(test_app) GFXRECON_END_NAMESPACE(gfxrecon) -int main(int argc, char *argv[]) { - try { +int main(int argc, char* argv[]) +{ + try + { gfxrecon::test_app::multisample_depth::App app{}; app.run("multisample depth"); return 0; - } catch (std::exception e) { + } + catch (std::exception e) + { std::cout << e.what() << std::endl; return -1; } From 4d432ebbd1222f58376157850d879382827f2ceb Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Sun, 27 Oct 2024 15:20:50 -0400 Subject: [PATCH 24/70] Format --- test/test_apps/common/test_app_base.cpp | 1869 ++++-- test/test_apps/common/test_app_base.h | 497 +- test/test_apps/common/test_app_dispatch.h | 6797 ++++++++++++++------- test/test_apps/multisample-depth/app.cpp | 35 +- test/test_apps/triangle/app.cpp | 17 +- 5 files changed, 6201 insertions(+), 3014 deletions(-) diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index e45ae47405..6bb8f2e87a 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -46,55 +46,77 @@ GFXRECON_BEGIN_NAMESPACE(test) GFXRECON_BEGIN_NAMESPACE(detail) -GenericFeaturesPNextNode::GenericFeaturesPNextNode() { memset(fields, UINT8_MAX, sizeof(VkBool32) * field_capacity); } +GenericFeaturesPNextNode::GenericFeaturesPNextNode() +{ + memset(fields, UINT8_MAX, sizeof(VkBool32) * field_capacity); +} -bool GenericFeaturesPNextNode::match(GenericFeaturesPNextNode const& requested, GenericFeaturesPNextNode const& supported) noexcept { +bool GenericFeaturesPNextNode::match(GenericFeaturesPNextNode const& requested, + GenericFeaturesPNextNode const& supported) noexcept +{ assert(requested.sType == supported.sType && "Non-matching sTypes in features nodes!"); - for (uint32_t i = 0; i < field_capacity; i++) { - if (requested.fields[i] && !supported.fields[i]) return false; + for (uint32_t i = 0; i < field_capacity; i++) + { + if (requested.fields[i] && !supported.fields[i]) + return false; } return true; } -void GenericFeaturesPNextNode::combine(GenericFeaturesPNextNode const& right) noexcept { +void GenericFeaturesPNextNode::combine(GenericFeaturesPNextNode const& right) noexcept +{ assert(sType == right.sType && "Non-matching sTypes in features nodes!"); - for (uint32_t i = 0; i < GenericFeaturesPNextNode::field_capacity; i++) { + for (uint32_t i = 0; i < GenericFeaturesPNextNode::field_capacity; i++) + { fields[i] = fields[i] || right.fields[i]; } } -bool GenericFeatureChain::match_all(GenericFeatureChain const& extension_requested) const noexcept { +bool GenericFeatureChain::match_all(GenericFeatureChain const& extension_requested) 
const noexcept +{ // Should only be false if extension_supported was unable to be filled out, due to the // physical device not supporting vkGetPhysicalDeviceFeatures2 in any capacity. - if (extension_requested.nodes.size() != nodes.size()) { + if (extension_requested.nodes.size() != nodes.size()) + { return false; } - for (size_t i = 0; i < nodes.size() && i < nodes.size(); ++i) { - if (!GenericFeaturesPNextNode::match(extension_requested.nodes[i], nodes[i])) return false; + for (size_t i = 0; i < nodes.size() && i < nodes.size(); ++i) + { + if (!GenericFeaturesPNextNode::match(extension_requested.nodes[i], nodes[i])) + return false; } return true; } -bool GenericFeatureChain::find_and_match(GenericFeatureChain const& extensions_requested) const noexcept { - for (const auto& requested_extension_node : extensions_requested.nodes) { +bool GenericFeatureChain::find_and_match(GenericFeatureChain const& extensions_requested) const noexcept +{ + for (const auto& requested_extension_node : extensions_requested.nodes) + { bool found = false; - for (const auto& supported_node : nodes) { - if (supported_node.sType == requested_extension_node.sType) { + for (const auto& supported_node : nodes) + { + if (supported_node.sType == requested_extension_node.sType) + { found = true; - if (!GenericFeaturesPNextNode::match(requested_extension_node, supported_node)) return false; + if (!GenericFeaturesPNextNode::match(requested_extension_node, supported_node)) + return false; break; } } - if (!found) return false; + if (!found) + return false; } return true; } -void GenericFeatureChain::chain_up(VkPhysicalDeviceFeatures2& feats2) noexcept { +void GenericFeatureChain::chain_up(VkPhysicalDeviceFeatures2& feats2) noexcept +{ detail::GenericFeaturesPNextNode* prev = nullptr; - for (auto& extension : nodes) { - if (prev != nullptr) { + for (auto& extension : nodes) + { + if (prev != nullptr) + { prev->pNext = &extension; } prev = &extension; @@ -103,23 +125,28 @@ void GenericFeatureChain::chain_up(VkPhysicalDeviceFeatures2& feats2) noexcept { feats2.pNext = !nodes.empty() ? 
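// chain_up() links every stored feature struct into a single pNext chain and hangs it off the
// VkPhysicalDeviceFeatures2 passed in, so one query can fill all of them. Typical use (a
// sketch; `chain` is a populated GenericFeatureChain and `physical_device` a placeholder):

VkPhysicalDeviceFeatures2 features2 = {};
features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
chain.chain_up(features2);                                 // nodes[0] -> nodes[1] -> ... via pNext
vkGetPhysicalDeviceFeatures2(physical_device, &features2); // the driver fills every node in the chain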
&nodes.at(0) : nullptr; } -void GenericFeatureChain::combine(GenericFeatureChain const& right) noexcept { - for (const auto& right_node : right.nodes) { +void GenericFeatureChain::combine(GenericFeatureChain const& right) noexcept +{ + for (const auto& right_node : right.nodes) + { bool already_contained = false; - for (auto& left_node : nodes) { - if (left_node.sType == right_node.sType) { + for (auto& left_node : nodes) + { + if (left_node.sType == right_node.sType) + { left_node.combine(right_node); already_contained = true; } } - if (!already_contained) { + if (!already_contained) + { nodes.push_back(right_node); } } } - -class VulkanFunctions { +class VulkanFunctions +{ private: std::mutex init_mutex; @@ -129,36 +156,45 @@ class VulkanFunctions { HMODULE library = nullptr; #endif - bool load_vulkan_library() { + bool load_vulkan_library() + { // Can immediately return if it has already been loaded - if (library) { + if (library) + { return true; } #if defined(__linux__) library = dlopen("libvulkan.so.1", RTLD_NOW | RTLD_LOCAL); - if (!library) library = dlopen("libvulkan.so", RTLD_NOW | RTLD_LOCAL); + if (!library) + library = dlopen("libvulkan.so", RTLD_NOW | RTLD_LOCAL); #elif defined(__APPLE__) library = dlopen("libvulkan.dylib", RTLD_NOW | RTLD_LOCAL); - if (!library) library = dlopen("libvulkan.1.dylib", RTLD_NOW | RTLD_LOCAL); - if (!library) library = dlopen("libMoltenVK.dylib", RTLD_NOW | RTLD_LOCAL); + if (!library) + library = dlopen("libvulkan.1.dylib", RTLD_NOW | RTLD_LOCAL); + if (!library) + library = dlopen("libMoltenVK.dylib", RTLD_NOW | RTLD_LOCAL); #elif defined(_WIN32) library = LoadLibrary(TEXT("vulkan-1.dll")); #else assert(false && "Unsupported platform"); #endif - if (!library) return false; + if (!library) + return false; load_func(ptr_vkGetInstanceProcAddr, "vkGetInstanceProcAddr"); return ptr_vkGetInstanceProcAddr != nullptr; } - template void load_func(T& func_dest, const char* func_name) { + template + void load_func(T& func_dest, const char* func_name) + { #if defined(__linux__) || defined(__APPLE__) func_dest = reinterpret_cast(dlsym(library, func_name)); #elif defined(_WIN32) func_dest = reinterpret_cast(GetProcAddress(library, func_name)); #endif } - void close() { + void close() + { #if defined(__linux__) || defined(__APPLE__) dlclose(library); #elif defined(_WIN32) @@ -168,13 +204,18 @@ class VulkanFunctions { } public: - bool init_vulkan_funcs(PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr = nullptr) { + bool init_vulkan_funcs(PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr = nullptr) + { std::lock_guard lg(init_mutex); - if (fp_vkGetInstanceProcAddr != nullptr) { + if (fp_vkGetInstanceProcAddr != nullptr) + { ptr_vkGetInstanceProcAddr = fp_vkGetInstanceProcAddr; - } else { + } + else + { bool ret = load_vulkan_library(); - if (!ret) return false; + if (!ret) + return false; } fp_vkEnumerateInstanceExtensionProperties = reinterpret_cast( @@ -189,44 +230,49 @@ class VulkanFunctions { } public: - template void get_inst_proc_addr(T& out_ptr, const char* func_name) { + template + void get_inst_proc_addr(T& out_ptr, const char* func_name) + { out_ptr = reinterpret_cast(ptr_vkGetInstanceProcAddr(instance, func_name)); } - template void get_device_proc_addr(VkDevice device, T& out_ptr, const char* func_name) { + template + void get_device_proc_addr(VkDevice device, T& out_ptr, const char* func_name) + { out_ptr = reinterpret_cast(fp_vkGetDeviceProcAddr(device, func_name)); } PFN_vkGetInstanceProcAddr ptr_vkGetInstanceProcAddr = nullptr; - VkInstance 
instance = nullptr; + VkInstance instance = nullptr; PFN_vkEnumerateInstanceExtensionProperties fp_vkEnumerateInstanceExtensionProperties = nullptr; - PFN_vkEnumerateInstanceLayerProperties fp_vkEnumerateInstanceLayerProperties = nullptr; - PFN_vkEnumerateInstanceVersion fp_vkEnumerateInstanceVersion = nullptr; - PFN_vkCreateInstance fp_vkCreateInstance = nullptr; - - PFN_vkDestroyInstance fp_vkDestroyInstance = nullptr; - PFN_vkCreateDebugUtilsMessengerEXT fp_vkCreateDebugUtilsMessengerEXT = nullptr; - PFN_vkDestroyDebugUtilsMessengerEXT fp_vkDestroyDebugUtilsMessengerEXT = nullptr; - PFN_vkEnumeratePhysicalDevices fp_vkEnumeratePhysicalDevices = nullptr; - PFN_vkGetPhysicalDeviceFeatures fp_vkGetPhysicalDeviceFeatures = nullptr; - PFN_vkGetPhysicalDeviceFeatures2 fp_vkGetPhysicalDeviceFeatures2 = nullptr; - PFN_vkGetPhysicalDeviceFeatures2KHR fp_vkGetPhysicalDeviceFeatures2KHR = nullptr; - PFN_vkGetPhysicalDeviceProperties fp_vkGetPhysicalDeviceProperties = nullptr; + PFN_vkEnumerateInstanceLayerProperties fp_vkEnumerateInstanceLayerProperties = nullptr; + PFN_vkEnumerateInstanceVersion fp_vkEnumerateInstanceVersion = nullptr; + PFN_vkCreateInstance fp_vkCreateInstance = nullptr; + + PFN_vkDestroyInstance fp_vkDestroyInstance = nullptr; + PFN_vkCreateDebugUtilsMessengerEXT fp_vkCreateDebugUtilsMessengerEXT = nullptr; + PFN_vkDestroyDebugUtilsMessengerEXT fp_vkDestroyDebugUtilsMessengerEXT = nullptr; + PFN_vkEnumeratePhysicalDevices fp_vkEnumeratePhysicalDevices = nullptr; + PFN_vkGetPhysicalDeviceFeatures fp_vkGetPhysicalDeviceFeatures = nullptr; + PFN_vkGetPhysicalDeviceFeatures2 fp_vkGetPhysicalDeviceFeatures2 = nullptr; + PFN_vkGetPhysicalDeviceFeatures2KHR fp_vkGetPhysicalDeviceFeatures2KHR = nullptr; + PFN_vkGetPhysicalDeviceProperties fp_vkGetPhysicalDeviceProperties = nullptr; PFN_vkGetPhysicalDeviceQueueFamilyProperties fp_vkGetPhysicalDeviceQueueFamilyProperties = nullptr; - PFN_vkGetPhysicalDeviceMemoryProperties fp_vkGetPhysicalDeviceMemoryProperties = nullptr; - PFN_vkEnumerateDeviceExtensionProperties fp_vkEnumerateDeviceExtensionProperties = nullptr; + PFN_vkGetPhysicalDeviceMemoryProperties fp_vkGetPhysicalDeviceMemoryProperties = nullptr; + PFN_vkEnumerateDeviceExtensionProperties fp_vkEnumerateDeviceExtensionProperties = nullptr; - PFN_vkCreateDevice fp_vkCreateDevice = nullptr; + PFN_vkCreateDevice fp_vkCreateDevice = nullptr; PFN_vkGetDeviceProcAddr fp_vkGetDeviceProcAddr = nullptr; - PFN_vkDestroySurfaceKHR fp_vkDestroySurfaceKHR = nullptr; - PFN_vkGetPhysicalDeviceSurfaceSupportKHR fp_vkGetPhysicalDeviceSurfaceSupportKHR = nullptr; - PFN_vkGetPhysicalDeviceSurfaceFormatsKHR fp_vkGetPhysicalDeviceSurfaceFormatsKHR = nullptr; + PFN_vkDestroySurfaceKHR fp_vkDestroySurfaceKHR = nullptr; + PFN_vkGetPhysicalDeviceSurfaceSupportKHR fp_vkGetPhysicalDeviceSurfaceSupportKHR = nullptr; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR fp_vkGetPhysicalDeviceSurfaceFormatsKHR = nullptr; PFN_vkGetPhysicalDeviceSurfacePresentModesKHR fp_vkGetPhysicalDeviceSurfacePresentModesKHR = nullptr; PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR = nullptr; - void init_instance_funcs(VkInstance inst) { + void init_instance_funcs(VkInstance inst) + { instance = inst; get_inst_proc_addr(fp_vkDestroyInstance, "vkDestroyInstance"); get_inst_proc_addr(fp_vkCreateDebugUtilsMessengerEXT, "vkCreateDebugUtilsMessengerEXT"); @@ -252,18 +298,23 @@ class VulkanFunctions { } }; -static VulkanFunctions& vulkan_functions() { +static VulkanFunctions& vulkan_functions() +{ 
static VulkanFunctions v; return v; } // Helper for robustly executing the two-call pattern -template auto get_vector(std::vector& out, F&& f, Ts&&... ts) -> VkResult { +template +auto get_vector(std::vector& out, F&& f, Ts&&... ts) -> VkResult +{ uint32_t count = 0; VkResult err; - do { + do + { err = f(ts..., &count, nullptr); - if (err != VK_SUCCESS) { + if (err != VK_SUCCESS) + { return err; }; out.resize(count); @@ -273,8 +324,10 @@ template auto get_vector(std::vector return err; } -template auto get_vector_noerror(F&& f, Ts&&... ts) -> std::vector { - uint32_t count = 0; +template +auto get_vector_noerror(F&& f, Ts&&... ts) -> std::vector +{ + uint32_t count = 0; std::vector results; f(ts..., &count, nullptr); results.resize(count); @@ -285,8 +338,10 @@ template auto get_vector_noerror(F&& f, GFXRECON_END_NAMESPACE(detail) -const char* to_string_message_severity(VkDebugUtilsMessageSeverityFlagBitsEXT s) { - switch (s) { +const char* to_string_message_severity(VkDebugUtilsMessageSeverityFlagBitsEXT s) +{ + switch (s) + { case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT: return "VERBOSE"; case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT: @@ -299,95 +354,131 @@ const char* to_string_message_severity(VkDebugUtilsMessageSeverityFlagBitsEXT s) return "UNKNOWN"; } } -const char* to_string_message_type(VkDebugUtilsMessageTypeFlagsEXT s) { - if (s == 7) return "General | Validation | Performance"; - if (s == 6) return "Validation | Performance"; - if (s == 5) return "General | Performance"; - if (s == 4 /*VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT*/) return "Performance"; - if (s == 3) return "General | Validation"; - if (s == 2 /*VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT*/) return "Validation"; - if (s == 1 /*VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT*/) return "General"; +const char* to_string_message_type(VkDebugUtilsMessageTypeFlagsEXT s) +{ + if (s == 7) + return "General | Validation | Performance"; + if (s == 6) + return "Validation | Performance"; + if (s == 5) + return "General | Performance"; + if (s == 4 /*VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT*/) + return "Performance"; + if (s == 3) + return "General | Validation"; + if (s == 2 /*VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT*/) + return "Validation"; + if (s == 1 /*VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT*/) + return "General"; return "Unknown"; } -VkResult create_debug_utils_messenger(VkInstance instance, +VkResult create_debug_utils_messenger(VkInstance instance, PFN_vkDebugUtilsMessengerCallbackEXT debug_callback, - VkDebugUtilsMessageSeverityFlagsEXT severity, - VkDebugUtilsMessageTypeFlagsEXT type, - void* user_data_pointer, - VkDebugUtilsMessengerEXT* pDebugMessenger, - VkAllocationCallbacks* allocation_callbacks) { + VkDebugUtilsMessageSeverityFlagsEXT severity, + VkDebugUtilsMessageTypeFlagsEXT type, + void* user_data_pointer, + VkDebugUtilsMessengerEXT* pDebugMessenger, + VkAllocationCallbacks* allocation_callbacks) +{ - if (debug_callback == nullptr) debug_callback = default_debug_callback; + if (debug_callback == nullptr) + debug_callback = default_debug_callback; VkDebugUtilsMessengerCreateInfoEXT messengerCreateInfo = {}; - messengerCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT; - messengerCreateInfo.pNext = nullptr; - messengerCreateInfo.messageSeverity = severity; - messengerCreateInfo.messageType = type; - messengerCreateInfo.pfnUserCallback = debug_callback; - messengerCreateInfo.pUserData = user_data_pointer; - - if 
(detail::vulkan_functions().fp_vkCreateDebugUtilsMessengerEXT != nullptr) { + messengerCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT; + messengerCreateInfo.pNext = nullptr; + messengerCreateInfo.messageSeverity = severity; + messengerCreateInfo.messageType = type; + messengerCreateInfo.pfnUserCallback = debug_callback; + messengerCreateInfo.pUserData = user_data_pointer; + + if (detail::vulkan_functions().fp_vkCreateDebugUtilsMessengerEXT != nullptr) + { return detail::vulkan_functions().fp_vkCreateDebugUtilsMessengerEXT( instance, &messengerCreateInfo, allocation_callbacks, pDebugMessenger); - } else { + } + else + { return VK_ERROR_EXTENSION_NOT_PRESENT; } } -void destroy_debug_utils_messenger( - VkInstance instance, VkDebugUtilsMessengerEXT debugMessenger, VkAllocationCallbacks* allocation_callbacks) { +void destroy_debug_utils_messenger(VkInstance instance, + VkDebugUtilsMessengerEXT debugMessenger, + VkAllocationCallbacks* allocation_callbacks) +{ - if (detail::vulkan_functions().fp_vkDestroyDebugUtilsMessengerEXT != nullptr) { + if (detail::vulkan_functions().fp_vkDestroyDebugUtilsMessengerEXT != nullptr) + { detail::vulkan_functions().fp_vkDestroyDebugUtilsMessengerEXT(instance, debugMessenger, allocation_callbacks); } } GFXRECON_BEGIN_NAMESPACE(detail) -bool check_layer_supported(std::vector const& available_layers, const char* layer_name) { - if (!layer_name) return false; - for (const auto& layer_properties : available_layers) { - if (strcmp(layer_name, layer_properties.layerName) == 0) { +bool check_layer_supported(std::vector const& available_layers, const char* layer_name) +{ + if (!layer_name) + return false; + for (const auto& layer_properties : available_layers) + { + if (strcmp(layer_name, layer_properties.layerName) == 0) + { return true; } } return false; } -bool check_layers_supported(std::vector const& available_layers, std::vector const& layer_names) { +bool check_layers_supported(std::vector const& available_layers, + std::vector const& layer_names) +{ bool all_found = true; - for (const auto& layer_name : layer_names) { + for (const auto& layer_name : layer_names) + { bool found = check_layer_supported(available_layers, layer_name); - if (!found) all_found = false; + if (!found) + all_found = false; } return all_found; } -bool check_extension_supported(std::vector const& available_extensions, const char* extension_name) { - if (!extension_name) return false; - for (const auto& extension_properties : available_extensions) { - if (strcmp(extension_name, extension_properties.extensionName) == 0) { +bool check_extension_supported(std::vector const& available_extensions, + const char* extension_name) +{ + if (!extension_name) + return false; + for (const auto& extension_properties : available_extensions) + { + if (strcmp(extension_name, extension_properties.extensionName) == 0) + { return true; } } return false; } -bool check_extensions_supported( - std::vector const& available_extensions, std::vector const& extension_names) { +bool check_extensions_supported(std::vector const& available_extensions, + std::vector const& extension_names) +{ bool all_found = true; - for (const auto& extension_name : extension_names) { + for (const auto& extension_name : extension_names) + { bool found = check_extension_supported(available_extensions, extension_name); - if (!found) all_found = false; + if (!found) + all_found = false; } return all_found; } -template void setup_pNext_chain(T& structure, std::vector const& structs) { +template +void 
setup_pNext_chain(T& structure, std::vector const& structs) +{ structure.pNext = nullptr; - if (structs.size() <= 0) return; - for (size_t i = 0; i < structs.size() - 1; i++) { + if (structs.size() <= 0) + return; + for (size_t i = 0; i < structs.size() - 1; i++) + { structs.at(i)->pNext = structs.at(i + 1); } structure.pNext = structs.at(0); @@ -396,12 +487,14 @@ const char* validation_layer_name = "VK_LAYER_KHRONOS_validation"; GFXRECON_END_NAMESPACE(detail) -#define CASE_TO_STRING(CATEGORY, TYPE) \ - case CATEGORY::TYPE: \ +#define CASE_TO_STRING(CATEGORY, TYPE) \ + case CATEGORY::TYPE: \ return #TYPE; -const char* to_string(InstanceError err) { - switch (err) { +const char* to_string(InstanceError err) +{ + switch (err) + { CASE_TO_STRING(InstanceError, vulkan_unavailable) CASE_TO_STRING(InstanceError, vulkan_version_unavailable) CASE_TO_STRING(InstanceError, vulkan_version_1_1_unavailable) @@ -415,8 +508,10 @@ const char* to_string(InstanceError err) { return ""; } } -const char* to_string(PhysicalDeviceError err) { - switch (err) { +const char* to_string(PhysicalDeviceError err) +{ + switch (err) + { CASE_TO_STRING(PhysicalDeviceError, no_surface_provided) CASE_TO_STRING(PhysicalDeviceError, failed_enumerate_physical_devices) CASE_TO_STRING(PhysicalDeviceError, no_physical_devices_found) @@ -425,8 +520,10 @@ const char* to_string(PhysicalDeviceError err) { return ""; } } -const char* to_string(QueueError err) { - switch (err) { +const char* to_string(QueueError err) +{ + switch (err) + { CASE_TO_STRING(QueueError, present_unavailable) CASE_TO_STRING(QueueError, graphics_unavailable) CASE_TO_STRING(QueueError, compute_unavailable) @@ -437,15 +534,19 @@ const char* to_string(QueueError err) { return ""; } } -const char* to_string(DeviceError err) { - switch (err) { +const char* to_string(DeviceError err) +{ + switch (err) + { CASE_TO_STRING(DeviceError, failed_create_device) default: return ""; } } -const char* to_string(SwapchainError err) { - switch (err) { +const char* to_string(SwapchainError err) +{ + switch (err) + { CASE_TO_STRING(SwapchainError, surface_handle_not_provided) CASE_TO_STRING(SwapchainError, failed_query_surface_support_details) CASE_TO_STRING(SwapchainError, failed_create_swapchain) @@ -458,10 +559,12 @@ const char* to_string(SwapchainError err) { } } -std::exception to_exception(InstanceError error) { +std::exception to_exception(InstanceError error) +{ return std::runtime_error(to_string(error)); } -std::exception to_exception(InstanceError error, VkResult result) { +std::exception to_exception(InstanceError error, VkResult result) +{ std::string message{}; message.append(to_string(error)); message.append(": "); @@ -469,10 +572,12 @@ std::exception to_exception(InstanceError error, VkResult result) { return std::runtime_error(message); } -std::exception to_exception(PhysicalDeviceError error) { +std::exception to_exception(PhysicalDeviceError error) +{ return std::runtime_error(to_string(error)); } -std::exception to_exception(PhysicalDeviceError error, VkResult result) { +std::exception to_exception(PhysicalDeviceError error, VkResult result) +{ std::string message{}; message.append(to_string(error)); message.append(": "); @@ -480,10 +585,12 @@ std::exception to_exception(PhysicalDeviceError error, VkResult result) { return std::runtime_error(message); } -std::exception to_exception(QueueError error) { +std::exception to_exception(QueueError error) +{ return std::runtime_error(to_string(error)); } -std::exception to_exception(QueueError error, VkResult result) 
{ +std::exception to_exception(QueueError error, VkResult result) +{ std::string message{}; message.append(to_string(error)); message.append(": "); @@ -491,10 +598,12 @@ std::exception to_exception(QueueError error, VkResult result) { return std::runtime_error(message); } -std::exception to_exception(DeviceError error) { +std::exception to_exception(DeviceError error) +{ return std::runtime_error(to_string(error)); } -std::exception to_exception(DeviceError error, VkResult result) { +std::exception to_exception(DeviceError error, VkResult result) +{ std::string message{}; message.append(to_string(error)); message.append(": "); @@ -502,10 +611,12 @@ std::exception to_exception(DeviceError error, VkResult result) { return std::runtime_error(message); } -std::exception to_exception(SwapchainError error) { +std::exception to_exception(SwapchainError error) +{ return std::runtime_error(to_string(error)); } -std::exception to_exception(SwapchainError error, VkResult result) { +std::exception to_exception(SwapchainError error, VkResult result) +{ std::string message{}; message.append(to_string(error)); message.append(": "); @@ -513,111 +624,147 @@ std::exception to_exception(SwapchainError error, VkResult result) { return std::runtime_error(message); } -SystemInfo SystemInfo::get_system_info() { - if (!detail::vulkan_functions().init_vulkan_funcs(nullptr)) { +SystemInfo SystemInfo::get_system_info() +{ + if (!detail::vulkan_functions().init_vulkan_funcs(nullptr)) + { throw to_exception(InstanceError::vulkan_unavailable); } return SystemInfo(); } -SystemInfo SystemInfo::get_system_info(PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr) { +SystemInfo SystemInfo::get_system_info(PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr) +{ // Using externally provided function pointers, assume the loader is available - if (!detail::vulkan_functions().init_vulkan_funcs(fp_vkGetInstanceProcAddr)) { + if (!detail::vulkan_functions().init_vulkan_funcs(fp_vkGetInstanceProcAddr)) + { throw to_exception(InstanceError::vulkan_unavailable); } return SystemInfo(); } -SystemInfo::SystemInfo() { +SystemInfo::SystemInfo() +{ auto available_layers_ret = detail::get_vector( this->available_layers, detail::vulkan_functions().fp_vkEnumerateInstanceLayerProperties); - if (available_layers_ret != VK_SUCCESS) { + if (available_layers_ret != VK_SUCCESS) + { this->available_layers.clear(); } for (auto& layer : this->available_layers) - if (strcmp(layer.layerName, detail::validation_layer_name) == 0) validation_layers_available = true; + if (strcmp(layer.layerName, detail::validation_layer_name) == 0) + validation_layers_available = true; auto available_extensions_ret = detail::get_vector( this->available_extensions, detail::vulkan_functions().fp_vkEnumerateInstanceExtensionProperties, nullptr); - if (available_extensions_ret != VK_SUCCESS) { + if (available_extensions_ret != VK_SUCCESS) + { this->available_extensions.clear(); } - for (auto& ext : this->available_extensions) { - if (strcmp(ext.extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0) { + for (auto& ext : this->available_extensions) + { + if (strcmp(ext.extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0) + { debug_utils_available = true; } } - for (auto& layer : this->available_layers) { + for (auto& layer : this->available_layers) + { std::vector layer_extensions; - auto layer_extensions_ret = detail::get_vector( + auto layer_extensions_ret = detail::get_vector( layer_extensions, detail::vulkan_functions().fp_vkEnumerateInstanceExtensionProperties, 
layer.layerName); - if (layer_extensions_ret == VK_SUCCESS) { + if (layer_extensions_ret == VK_SUCCESS) + { this->available_extensions.insert( this->available_extensions.end(), layer_extensions.begin(), layer_extensions.end()); - for (auto& ext : layer_extensions) { - if (strcmp(ext.extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0) { + for (auto& ext : layer_extensions) + { + if (strcmp(ext.extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0) + { debug_utils_available = true; } } } } } -bool SystemInfo::is_extension_available(const char* extension_name) const { - if (!extension_name) return false; +bool SystemInfo::is_extension_available(const char* extension_name) const +{ + if (!extension_name) + return false; return detail::check_extension_supported(available_extensions, extension_name); } -bool SystemInfo::is_layer_available(const char* layer_name) const { - if (!layer_name) return false; +bool SystemInfo::is_layer_available(const char* layer_name) const +{ + if (!layer_name) + return false; return detail::check_layer_supported(available_layers, layer_name); } -void destroy_surface(Instance const& instance, VkSurfaceKHR surface) { - if (instance.instance != VK_NULL_HANDLE && surface != VK_NULL_HANDLE) { +void destroy_surface(Instance const& instance, VkSurfaceKHR surface) +{ + if (instance.instance != VK_NULL_HANDLE && surface != VK_NULL_HANDLE) + { detail::vulkan_functions().fp_vkDestroySurfaceKHR(instance.instance, surface, instance.allocation_callbacks); } } -void destroy_surface(VkInstance instance, VkSurfaceKHR surface, VkAllocationCallbacks* callbacks) { - if (instance != VK_NULL_HANDLE && surface != VK_NULL_HANDLE) { +void destroy_surface(VkInstance instance, VkSurfaceKHR surface, VkAllocationCallbacks* callbacks) +{ + if (instance != VK_NULL_HANDLE && surface != VK_NULL_HANDLE) + { detail::vulkan_functions().fp_vkDestroySurfaceKHR(instance, surface, callbacks); } } -void destroy_instance(Instance const& instance) { - if (instance.instance != VK_NULL_HANDLE) { +void destroy_instance(Instance const& instance) +{ + if (instance.instance != VK_NULL_HANDLE) + { if (instance.debug_messenger != VK_NULL_HANDLE) destroy_debug_utils_messenger(instance.instance, instance.debug_messenger, instance.allocation_callbacks); detail::vulkan_functions().fp_vkDestroyInstance(instance.instance, instance.allocation_callbacks); } } -Instance::operator VkInstance() const { return this->instance; } +Instance::operator VkInstance() const +{ + return this->instance; +} -vkb::InstanceDispatchTable Instance::make_table() const { return { instance, fp_vkGetInstanceProcAddr }; } +vkb::InstanceDispatchTable Instance::make_table() const +{ + return { instance, fp_vkGetInstanceProcAddr }; +} -InstanceBuilder::InstanceBuilder(PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr) { +InstanceBuilder::InstanceBuilder(PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr) +{ info.fp_vkGetInstanceProcAddr = fp_vkGetInstanceProcAddr; } InstanceBuilder::InstanceBuilder() {} -Instance InstanceBuilder::build() const { +Instance InstanceBuilder::build() const +{ auto system = SystemInfo::get_system_info(info.fp_vkGetInstanceProcAddr); uint32_t instance_version = VKB_VK_API_VERSION_1_0; if (info.minimum_instance_version > VKB_VK_API_VERSION_1_0 || info.required_api_version > VKB_VK_API_VERSION_1_0 || - info.desired_api_version > VKB_VK_API_VERSION_1_0) { - PFN_vkEnumerateInstanceVersion pfn_vkEnumerateInstanceVersion = detail::vulkan_functions().fp_vkEnumerateInstanceVersion; + info.desired_api_version > 
VKB_VK_API_VERSION_1_0) + { + PFN_vkEnumerateInstanceVersion pfn_vkEnumerateInstanceVersion = + detail::vulkan_functions().fp_vkEnumerateInstanceVersion; - if (pfn_vkEnumerateInstanceVersion != nullptr) { + if (pfn_vkEnumerateInstanceVersion != nullptr) + { VkResult res = pfn_vkEnumerateInstanceVersion(&instance_version); // Should always return VK_SUCCESS if (res != VK_SUCCESS && info.required_api_version > 0) throw to_exception(InstanceError::vulkan_version_unavailable); } if (pfn_vkEnumerateInstanceVersion == nullptr || instance_version < info.minimum_instance_version || - (info.minimum_instance_version == 0 && instance_version < info.required_api_version)) { + (info.minimum_instance_version == 0 && instance_version < info.required_api_version)) + { if (VK_VERSION_MINOR(info.required_api_version) == 2) throw to_exception(InstanceError::vulkan_version_1_2_unavailable); else if (VK_VERSION_MINOR(info.required_api_version)) @@ -629,47 +776,53 @@ Instance InstanceBuilder::build() const { uint32_t api_version = instance_version < VKB_VK_API_VERSION_1_1 ? instance_version : info.required_api_version; - if (info.desired_api_version > VKB_VK_API_VERSION_1_0 && instance_version >= info.desired_api_version) { + if (info.desired_api_version > VKB_VK_API_VERSION_1_0 && instance_version >= info.desired_api_version) + { instance_version = info.desired_api_version; - api_version = info.desired_api_version; + api_version = info.desired_api_version; } - VkApplicationInfo app_info = {}; - app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO; - app_info.pNext = nullptr; - app_info.pApplicationName = info.app_name != nullptr ? info.app_name : ""; + VkApplicationInfo app_info = {}; + app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO; + app_info.pNext = nullptr; + app_info.pApplicationName = info.app_name != nullptr ? info.app_name : ""; app_info.applicationVersion = info.application_version; - app_info.pEngineName = info.engine_name != nullptr ? info.engine_name : ""; - app_info.engineVersion = info.engine_version; - app_info.apiVersion = api_version; + app_info.pEngineName = info.engine_name != nullptr ? 
info.engine_name : ""; + app_info.engineVersion = info.engine_version; + app_info.apiVersion = api_version; std::vector extensions; std::vector layers; - for (auto& ext : info.extensions) - extensions.push_back(ext); - if (info.debug_callback != nullptr && info.use_debug_messenger && system.debug_utils_available) { + for (auto& ext : info.extensions) extensions.push_back(ext); + if (info.debug_callback != nullptr && info.use_debug_messenger && system.debug_utils_available) + { extensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME); } bool properties2_ext_enabled = - api_version < VKB_VK_API_VERSION_1_1 && detail::check_extension_supported(system.available_extensions, - VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME); - if (properties2_ext_enabled) { + api_version < VKB_VK_API_VERSION_1_1 && + detail::check_extension_supported(system.available_extensions, + VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME); + if (properties2_ext_enabled) + { extensions.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME); } #if defined(VK_KHR_portability_enumeration) bool portability_enumeration_support = detail::check_extension_supported(system.available_extensions, VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME); - if (portability_enumeration_support) { + if (portability_enumeration_support) + { extensions.push_back(VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME); } #else bool portability_enumeration_support = false; #endif - if (!info.headless_context) { + if (!info.headless_context) + { auto check_add_window_ext = [&](const char* name) -> bool { - if (!detail::check_extension_supported(system.available_extensions, name)) return false; + if (!detail::check_extension_supported(system.available_extensions, name)) + return false; extensions.push_back(name); return true; }; @@ -683,8 +836,8 @@ Instance InstanceBuilder::build() const { #elif defined(__linux__) // make sure all three calls to check_add_window_ext, don't allow short circuiting bool added_window_exts = check_add_window_ext("VK_KHR_xcb_surface"); - added_window_exts = check_add_window_ext("VK_KHR_xlib_surface") || added_window_exts; - added_window_exts = check_add_window_ext("VK_KHR_wayland_surface") || added_window_exts; + added_window_exts = check_add_window_ext("VK_KHR_xlib_surface") || added_window_exts; + added_window_exts = check_add_window_ext("VK_KHR_wayland_surface") || added_window_exts; #elif defined(__APPLE__) bool added_window_exts = check_add_window_ext("VK_EXT_metal_surface"); #endif @@ -692,82 +845,91 @@ Instance InstanceBuilder::build() const { throw to_exception(InstanceError::windowing_extensions_not_present); } bool all_extensions_supported = detail::check_extensions_supported(system.available_extensions, extensions); - if (!all_extensions_supported) { + if (!all_extensions_supported) + { throw to_exception(InstanceError::requested_extensions_not_present); } - for (auto& layer : info.layers) - layers.push_back(layer); + for (auto& layer : info.layers) layers.push_back(layer); - if (info.enable_validation_layers || (info.request_validation_layers && system.validation_layers_available)) { + if (info.enable_validation_layers || (info.request_validation_layers && system.validation_layers_available)) + { layers.push_back(detail::validation_layer_name); } bool all_layers_supported = detail::check_layers_supported(system.available_layers, layers); - if (!all_layers_supported) { + if (!all_layers_supported) + { throw to_exception(InstanceError::requested_layers_not_present); } std::vector pNext_chain; 
VkDebugUtilsMessengerCreateInfoEXT messengerCreateInfo = {}; - if (info.use_debug_messenger) { - messengerCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT; - messengerCreateInfo.pNext = nullptr; + if (info.use_debug_messenger) + { + messengerCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT; + messengerCreateInfo.pNext = nullptr; messengerCreateInfo.messageSeverity = info.debug_message_severity; - messengerCreateInfo.messageType = info.debug_message_type; + messengerCreateInfo.messageType = info.debug_message_type; messengerCreateInfo.pfnUserCallback = info.debug_callback; - messengerCreateInfo.pUserData = info.debug_user_data_pointer; + messengerCreateInfo.pUserData = info.debug_user_data_pointer; pNext_chain.push_back(reinterpret_cast(&messengerCreateInfo)); } VkValidationFeaturesEXT features{}; - if (info.enabled_validation_features.size() != 0 || info.disabled_validation_features.size()) { - features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT; - features.pNext = nullptr; - features.enabledValidationFeatureCount = static_cast(info.enabled_validation_features.size()); - features.pEnabledValidationFeatures = info.enabled_validation_features.data(); + if (info.enabled_validation_features.size() != 0 || info.disabled_validation_features.size()) + { + features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT; + features.pNext = nullptr; + features.enabledValidationFeatureCount = static_cast(info.enabled_validation_features.size()); + features.pEnabledValidationFeatures = info.enabled_validation_features.data(); features.disabledValidationFeatureCount = static_cast(info.disabled_validation_features.size()); - features.pDisabledValidationFeatures = info.disabled_validation_features.data(); + features.pDisabledValidationFeatures = info.disabled_validation_features.data(); pNext_chain.push_back(reinterpret_cast(&features)); } VkValidationFlagsEXT checks{}; - if (info.disabled_validation_checks.size() != 0) { - checks.sType = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT; - checks.pNext = nullptr; + if (info.disabled_validation_checks.size() != 0) + { + checks.sType = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT; + checks.pNext = nullptr; checks.disabledValidationCheckCount = static_cast(info.disabled_validation_checks.size()); - checks.pDisabledValidationChecks = info.disabled_validation_checks.data(); + checks.pDisabledValidationChecks = info.disabled_validation_checks.data(); pNext_chain.push_back(reinterpret_cast(&checks)); } VkInstanceCreateInfo instance_create_info = {}; - instance_create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO; + instance_create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO; detail::setup_pNext_chain(instance_create_info, pNext_chain); #if !defined(NDEBUG) - for (auto& node : pNext_chain) { + for (auto& node : pNext_chain) + { assert(node->sType != VK_STRUCTURE_TYPE_APPLICATION_INFO); } #endif - instance_create_info.flags = info.flags; - instance_create_info.pApplicationInfo = &app_info; - instance_create_info.enabledExtensionCount = static_cast(extensions.size()); + instance_create_info.flags = info.flags; + instance_create_info.pApplicationInfo = &app_info; + instance_create_info.enabledExtensionCount = static_cast(extensions.size()); instance_create_info.ppEnabledExtensionNames = extensions.data(); - instance_create_info.enabledLayerCount = static_cast(layers.size()); - instance_create_info.ppEnabledLayerNames = layers.data(); + instance_create_info.enabledLayerCount = static_cast(layers.size()); + 
instance_create_info.ppEnabledLayerNames = layers.data(); #if defined(VK_KHR_portability_enumeration) - if (portability_enumeration_support) { + if (portability_enumeration_support) + { instance_create_info.flags |= VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR; } #endif Instance instance; - VkResult res = - detail::vulkan_functions().fp_vkCreateInstance(&instance_create_info, info.allocation_callbacks, &instance.instance); - if (res != VK_SUCCESS) throw to_exception(InstanceError::failed_create_instance, res); + VkResult res = detail::vulkan_functions().fp_vkCreateInstance( + &instance_create_info, info.allocation_callbacks, &instance.instance); + if (res != VK_SUCCESS) + throw to_exception(InstanceError::failed_create_instance, res); detail::vulkan_functions().init_instance_funcs(instance.instance); - if (info.use_debug_messenger) { + if (info.use_debug_messenger) + { res = create_debug_utils_messenger(instance.instance, info.debug_callback, info.debug_message_severity, @@ -775,165 +937,206 @@ Instance InstanceBuilder::build() const { info.debug_user_data_pointer, &instance.debug_messenger, info.allocation_callbacks); - if (res != VK_SUCCESS) throw to_exception(InstanceError::failed_create_debug_messenger, res); + if (res != VK_SUCCESS) + throw to_exception(InstanceError::failed_create_debug_messenger, res); } - instance.headless = info.headless_context; - instance.properties2_ext_enabled = properties2_ext_enabled; - instance.allocation_callbacks = info.allocation_callbacks; - instance.instance_version = instance_version; - instance.api_version = api_version; + instance.headless = info.headless_context; + instance.properties2_ext_enabled = properties2_ext_enabled; + instance.allocation_callbacks = info.allocation_callbacks; + instance.instance_version = instance_version; + instance.api_version = api_version; instance.fp_vkGetInstanceProcAddr = detail::vulkan_functions().ptr_vkGetInstanceProcAddr; - instance.fp_vkGetDeviceProcAddr = detail::vulkan_functions().fp_vkGetDeviceProcAddr; + instance.fp_vkGetDeviceProcAddr = detail::vulkan_functions().fp_vkGetDeviceProcAddr; return instance; } -InstanceBuilder& InstanceBuilder::set_app_name(const char* app_name) { - if (!app_name) return *this; +InstanceBuilder& InstanceBuilder::set_app_name(const char* app_name) +{ + if (!app_name) + return *this; info.app_name = app_name; return *this; } -InstanceBuilder& InstanceBuilder::set_engine_name(const char* engine_name) { - if (!engine_name) return *this; +InstanceBuilder& InstanceBuilder::set_engine_name(const char* engine_name) +{ + if (!engine_name) + return *this; info.engine_name = engine_name; return *this; } -InstanceBuilder& InstanceBuilder::set_app_version(uint32_t app_version) { +InstanceBuilder& InstanceBuilder::set_app_version(uint32_t app_version) +{ info.application_version = app_version; return *this; } -InstanceBuilder& InstanceBuilder::set_app_version(uint32_t major, uint32_t minor, uint32_t patch) { +InstanceBuilder& InstanceBuilder::set_app_version(uint32_t major, uint32_t minor, uint32_t patch) +{ info.application_version = VKB_MAKE_VK_VERSION(0, major, minor, patch); return *this; } -InstanceBuilder& InstanceBuilder::set_engine_version(uint32_t engine_version) { +InstanceBuilder& InstanceBuilder::set_engine_version(uint32_t engine_version) +{ info.engine_version = engine_version; return *this; } -InstanceBuilder& InstanceBuilder::set_engine_version(uint32_t major, uint32_t minor, uint32_t patch) { +InstanceBuilder& InstanceBuilder::set_engine_version(uint32_t major, uint32_t 
minor, uint32_t patch) +{ info.engine_version = VKB_MAKE_VK_VERSION(0, major, minor, patch); return *this; } -InstanceBuilder& InstanceBuilder::require_api_version(uint32_t required_api_version) { +InstanceBuilder& InstanceBuilder::require_api_version(uint32_t required_api_version) +{ info.required_api_version = required_api_version; return *this; } -InstanceBuilder& InstanceBuilder::require_api_version(uint32_t major, uint32_t minor, uint32_t patch) { +InstanceBuilder& InstanceBuilder::require_api_version(uint32_t major, uint32_t minor, uint32_t patch) +{ info.required_api_version = VKB_MAKE_VK_VERSION(0, major, minor, patch); return *this; } -InstanceBuilder& InstanceBuilder::set_minimum_instance_version(uint32_t minimum_instance_version) { +InstanceBuilder& InstanceBuilder::set_minimum_instance_version(uint32_t minimum_instance_version) +{ info.minimum_instance_version = minimum_instance_version; return *this; } -InstanceBuilder& InstanceBuilder::set_minimum_instance_version(uint32_t major, uint32_t minor, uint32_t patch) { +InstanceBuilder& InstanceBuilder::set_minimum_instance_version(uint32_t major, uint32_t minor, uint32_t patch) +{ info.minimum_instance_version = VKB_MAKE_VK_VERSION(0, major, minor, patch); return *this; } -InstanceBuilder& InstanceBuilder::desire_api_version(uint32_t preferred_vulkan_version) { +InstanceBuilder& InstanceBuilder::desire_api_version(uint32_t preferred_vulkan_version) +{ info.desired_api_version = preferred_vulkan_version; return *this; } -InstanceBuilder& InstanceBuilder::desire_api_version(uint32_t major, uint32_t minor, uint32_t patch) { +InstanceBuilder& InstanceBuilder::desire_api_version(uint32_t major, uint32_t minor, uint32_t patch) +{ info.desired_api_version = VKB_MAKE_VK_VERSION(0, major, minor, patch); return *this; } -InstanceBuilder& InstanceBuilder::enable_layer(const char* layer_name) { - if (!layer_name) return *this; +InstanceBuilder& InstanceBuilder::enable_layer(const char* layer_name) +{ + if (!layer_name) + return *this; info.layers.push_back(layer_name); return *this; } -InstanceBuilder& InstanceBuilder::enable_extension(const char* extension_name) { - if (!extension_name) return *this; +InstanceBuilder& InstanceBuilder::enable_extension(const char* extension_name) +{ + if (!extension_name) + return *this; info.extensions.push_back(extension_name); return *this; } -InstanceBuilder& InstanceBuilder::enable_extensions(std::vector const& extensions) { - for (const auto extension : extensions) { +InstanceBuilder& InstanceBuilder::enable_extensions(std::vector const& extensions) +{ + for (const auto extension : extensions) + { info.extensions.push_back(extension); } return *this; } -InstanceBuilder& InstanceBuilder::enable_extensions(size_t count, const char* const* extensions) { - if (!extensions || count == 0) return *this; - for (size_t i = 0; i < count; i++) { +InstanceBuilder& InstanceBuilder::enable_extensions(size_t count, const char* const* extensions) +{ + if (!extensions || count == 0) + return *this; + for (size_t i = 0; i < count; i++) + { info.extensions.push_back(extensions[i]); } return *this; } -InstanceBuilder& InstanceBuilder::enable_validation_layers(bool enable_validation) { +InstanceBuilder& InstanceBuilder::enable_validation_layers(bool enable_validation) +{ info.enable_validation_layers = enable_validation; return *this; } -InstanceBuilder& InstanceBuilder::request_validation_layers(bool enable_validation) { +InstanceBuilder& InstanceBuilder::request_validation_layers(bool enable_validation) +{ 
info.request_validation_layers = enable_validation; return *this; } -InstanceBuilder& InstanceBuilder::use_default_debug_messenger() { +InstanceBuilder& InstanceBuilder::use_default_debug_messenger() +{ info.use_debug_messenger = true; - info.debug_callback = default_debug_callback; + info.debug_callback = default_debug_callback; return *this; } -InstanceBuilder& InstanceBuilder::set_debug_callback(PFN_vkDebugUtilsMessengerCallbackEXT callback) { +InstanceBuilder& InstanceBuilder::set_debug_callback(PFN_vkDebugUtilsMessengerCallbackEXT callback) +{ info.use_debug_messenger = true; - info.debug_callback = callback; + info.debug_callback = callback; return *this; } -InstanceBuilder& InstanceBuilder::set_debug_callback_user_data_pointer(void* user_data_pointer) { +InstanceBuilder& InstanceBuilder::set_debug_callback_user_data_pointer(void* user_data_pointer) +{ info.debug_user_data_pointer = user_data_pointer; return *this; } -InstanceBuilder& InstanceBuilder::set_headless(bool headless) { +InstanceBuilder& InstanceBuilder::set_headless(bool headless) +{ info.headless_context = headless; return *this; } -InstanceBuilder& InstanceBuilder::set_debug_messenger_severity(VkDebugUtilsMessageSeverityFlagsEXT severity) { +InstanceBuilder& InstanceBuilder::set_debug_messenger_severity(VkDebugUtilsMessageSeverityFlagsEXT severity) +{ info.debug_message_severity = severity; return *this; } -InstanceBuilder& InstanceBuilder::add_debug_messenger_severity(VkDebugUtilsMessageSeverityFlagsEXT severity) { +InstanceBuilder& InstanceBuilder::add_debug_messenger_severity(VkDebugUtilsMessageSeverityFlagsEXT severity) +{ info.debug_message_severity = info.debug_message_severity | severity; return *this; } -InstanceBuilder& InstanceBuilder::set_debug_messenger_type(VkDebugUtilsMessageTypeFlagsEXT type) { +InstanceBuilder& InstanceBuilder::set_debug_messenger_type(VkDebugUtilsMessageTypeFlagsEXT type) +{ info.debug_message_type = type; return *this; } -InstanceBuilder& InstanceBuilder::add_debug_messenger_type(VkDebugUtilsMessageTypeFlagsEXT type) { +InstanceBuilder& InstanceBuilder::add_debug_messenger_type(VkDebugUtilsMessageTypeFlagsEXT type) +{ info.debug_message_type = info.debug_message_type | type; return *this; } -InstanceBuilder& InstanceBuilder::add_validation_disable(VkValidationCheckEXT check) { +InstanceBuilder& InstanceBuilder::add_validation_disable(VkValidationCheckEXT check) +{ info.disabled_validation_checks.push_back(check); return *this; } -InstanceBuilder& InstanceBuilder::add_validation_feature_enable(VkValidationFeatureEnableEXT enable) { +InstanceBuilder& InstanceBuilder::add_validation_feature_enable(VkValidationFeatureEnableEXT enable) +{ info.enabled_validation_features.push_back(enable); return *this; } -InstanceBuilder& InstanceBuilder::add_validation_feature_disable(VkValidationFeatureDisableEXT disable) { +InstanceBuilder& InstanceBuilder::add_validation_feature_disable(VkValidationFeatureDisableEXT disable) +{ info.disabled_validation_features.push_back(disable); return *this; } -InstanceBuilder& InstanceBuilder::set_allocation_callbacks(VkAllocationCallbacks* callbacks) { +InstanceBuilder& InstanceBuilder::set_allocation_callbacks(VkAllocationCallbacks* callbacks) +{ info.allocation_callbacks = callbacks; return *this; } void destroy_debug_messenger(VkInstance const instance, VkDebugUtilsMessengerEXT const messenger); - // ---- Physical Device ---- // GFXRECON_BEGIN_NAMESPACE(detail) -std::vector check_device_extension_support( - std::vector const& available_extensions, std::vector 
const& desired_extensions) { +std::vector check_device_extension_support(std::vector const& available_extensions, + std::vector const& desired_extensions) +{ std::vector extensions_to_enable; - for (const auto& avail_ext : available_extensions) { - for (auto& req_ext : desired_extensions) { - if (avail_ext == req_ext) { + for (const auto& avail_ext : available_extensions) + { + for (auto& req_ext : desired_extensions) + { + if (avail_ext == req_ext) + { extensions_to_enable.push_back(req_ext); break; } @@ -1066,22 +1269,34 @@ bool supports_features(const VkPhysicalDeviceFeatures& supported, } // clang-format on // Finds the first queue which supports the desired operations. -std::optional get_first_queue_index(std::vector const& families, VkQueueFlags desired_flags) { - for (uint32_t i = 0; i < static_cast(families.size()); i++) { - if ((families[i].queueFlags & desired_flags) == desired_flags) return i; +std::optional get_first_queue_index(std::vector const& families, + VkQueueFlags desired_flags) +{ + for (uint32_t i = 0; i < static_cast(families.size()); i++) + { + if ((families[i].queueFlags & desired_flags) == desired_flags) + return i; } return {}; } // Finds the queue which is separate from the graphics queue and has the desired flag and not the // undesired flag, but will select it if no better options are available compute support. -std::optional get_separate_queue_index( - std::vector const& families, VkQueueFlags desired_flags, VkQueueFlags undesired_flags) { +std::optional get_separate_queue_index(std::vector const& families, + VkQueueFlags desired_flags, + VkQueueFlags undesired_flags) +{ std::optional index = {}; - for (uint32_t i = 0; i < static_cast(families.size()); i++) { - if ((families[i].queueFlags & desired_flags) == desired_flags && ((families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) == 0)) { - if ((families[i].queueFlags & undesired_flags) == 0) { + for (uint32_t i = 0; i < static_cast(families.size()); i++) + { + if ((families[i].queueFlags & desired_flags) == desired_flags && + ((families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) == 0)) + { + if ((families[i].queueFlags & undesired_flags) == 0) + { return i; - } else { + } + else + { index = i; } } @@ -1090,9 +1305,12 @@ std::optional get_separate_queue_index( } // finds the first queue which supports only the desired flag (not graphics or transfer). -std::optional get_dedicated_queue_index( - std::vector const& families, VkQueueFlags desired_flags, VkQueueFlags undesired_flags) { - for (uint32_t i = 0; i < static_cast(families.size()); i++) { +std::optional get_dedicated_queue_index(std::vector const& families, + VkQueueFlags desired_flags, + VkQueueFlags undesired_flags) +{ + for (uint32_t i = 0; i < static_cast(families.size()); i++) + { if ((families[i].queueFlags & desired_flags) == desired_flags && (families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) == 0 && (families[i].queueFlags & undesired_flags) == 0) return i; @@ -1101,43 +1319,58 @@ std::optional get_dedicated_queue_index( } // finds the first queue which supports presenting. 
-std::optional get_present_queue_index( - VkPhysicalDevice const phys_device, VkSurfaceKHR const surface, std::vector const& families) { - for (uint32_t i = 0; i < static_cast(families.size()); i++) { +std::optional get_present_queue_index(VkPhysicalDevice const phys_device, + VkSurfaceKHR const surface, + std::vector const& families) +{ + for (uint32_t i = 0; i < static_cast(families.size()); i++) + { VkBool32 presentSupport = false; - if (surface != VK_NULL_HANDLE) { - VkResult res = detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfaceSupportKHR(phys_device, i, surface, &presentSupport); - if (res != VK_SUCCESS) return {}; // TODO: determine if this should fail another way + if (surface != VK_NULL_HANDLE) + { + VkResult res = detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfaceSupportKHR( + phys_device, i, surface, &presentSupport); + if (res != VK_SUCCESS) + return {}; // TODO: determine if this should fail another way } - if (presentSupport == VK_TRUE) return i; + if (presentSupport == VK_TRUE) + return i; } return {}; } GFXRECON_END_NAMESPACE(detail) -PhysicalDevice PhysicalDeviceSelector::populate_device_details( - VkPhysicalDevice vk_phys_device, detail::GenericFeatureChain const& src_extended_features_chain) const { +PhysicalDevice +PhysicalDeviceSelector::populate_device_details(VkPhysicalDevice vk_phys_device, + detail::GenericFeatureChain const& src_extended_features_chain) const +{ PhysicalDevice physical_device{}; - physical_device.physical_device = vk_phys_device; - physical_device.surface = instance_info.surface; + physical_device.physical_device = vk_phys_device; + physical_device.surface = instance_info.surface; physical_device.defer_surface_initialization = criteria.defer_surface_initialization; - physical_device.instance_version = instance_info.version; - auto queue_families = detail::get_vector_noerror( + physical_device.instance_version = instance_info.version; + auto queue_families = detail::get_vector_noerror( detail::vulkan_functions().fp_vkGetPhysicalDeviceQueueFamilyProperties, vk_phys_device); physical_device.queue_families = queue_families; detail::vulkan_functions().fp_vkGetPhysicalDeviceProperties(vk_phys_device, &physical_device.properties); detail::vulkan_functions().fp_vkGetPhysicalDeviceFeatures(vk_phys_device, &physical_device.features); - detail::vulkan_functions().fp_vkGetPhysicalDeviceMemoryProperties(vk_phys_device, &physical_device.memory_properties); + detail::vulkan_functions().fp_vkGetPhysicalDeviceMemoryProperties(vk_phys_device, + &physical_device.memory_properties); physical_device.name = physical_device.properties.deviceName; std::vector available_extensions; - auto available_extensions_ret = detail::get_vector( - available_extensions, detail::vulkan_functions().fp_vkEnumerateDeviceExtensionProperties, vk_phys_device, nullptr); - if (available_extensions_ret != VK_SUCCESS) return physical_device; - for (const auto& ext : available_extensions) { + auto available_extensions_ret = + detail::get_vector(available_extensions, + detail::vulkan_functions().fp_vkEnumerateDeviceExtensionProperties, + vk_phys_device, + nullptr); + if (available_extensions_ret != VK_SUCCESS) + return physical_device; + for (const auto& ext : available_extensions) + { physical_device.available_extensions.push_back(&ext.extensionName[0]); } @@ -1146,13 +1379,17 @@ PhysicalDevice PhysicalDeviceSelector::populate_device_details( auto fill_chain = src_extended_features_chain; bool instance_is_1_1 = instance_info.version >= VKB_VK_API_VERSION_1_1; - if 
(!fill_chain.nodes.empty() && (instance_is_1_1 || instance_info.properties2_ext_enabled)) { + if (!fill_chain.nodes.empty() && (instance_is_1_1 || instance_info.properties2_ext_enabled)) + { VkPhysicalDeviceFeatures2 local_features{}; fill_chain.chain_up(local_features); // Use KHR function if not able to use the core function - if (instance_is_1_1) { + if (instance_is_1_1) + { detail::vulkan_functions().fp_vkGetPhysicalDeviceFeatures2(vk_phys_device, &local_features); - } else { + } + else + { detail::vulkan_functions().fp_vkGetPhysicalDeviceFeatures2KHR(vk_phys_device, &local_features); } physical_device.extended_features_chain = fill_chain; @@ -1161,20 +1398,29 @@ PhysicalDevice PhysicalDeviceSelector::populate_device_details( return physical_device; } -PhysicalDevice::Suitable PhysicalDeviceSelector::is_device_suitable(PhysicalDevice const& pd) const { +PhysicalDevice::Suitable PhysicalDeviceSelector::is_device_suitable(PhysicalDevice const& pd) const +{ PhysicalDevice::Suitable suitable = PhysicalDevice::Suitable::yes; - if (criteria.name.size() > 0 && criteria.name != pd.properties.deviceName) return PhysicalDevice::Suitable::no; + if (criteria.name.size() > 0 && criteria.name != pd.properties.deviceName) + return PhysicalDevice::Suitable::no; - if (criteria.required_version > pd.properties.apiVersion) return PhysicalDevice::Suitable::no; - if (criteria.desired_version > pd.properties.apiVersion) suitable = PhysicalDevice::Suitable::partial; + if (criteria.required_version > pd.properties.apiVersion) + return PhysicalDevice::Suitable::no; + if (criteria.desired_version > pd.properties.apiVersion) + suitable = PhysicalDevice::Suitable::partial; - bool dedicated_compute = detail::get_dedicated_queue_index(pd.queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT).has_value(); - bool dedicated_transfer = detail::get_dedicated_queue_index(pd.queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT).has_value(); - bool separate_compute = detail::get_separate_queue_index(pd.queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT).has_value(); - bool separate_transfer = detail::get_separate_queue_index(pd.queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT).has_value(); + bool dedicated_compute = + detail::get_dedicated_queue_index(pd.queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT).has_value(); + bool dedicated_transfer = + detail::get_dedicated_queue_index(pd.queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT).has_value(); + bool separate_compute = + detail::get_separate_queue_index(pd.queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT).has_value(); + bool separate_transfer = + detail::get_separate_queue_index(pd.queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT).has_value(); - bool present_queue = detail::get_present_queue_index(pd.physical_device, instance_info.surface, pd.queue_families).has_value(); + bool present_queue = + detail::get_present_queue_index(pd.physical_device, instance_info.surface, pd.queue_families).has_value(); if (criteria.require_dedicated_compute_queue && !dedicated_compute) { @@ -1188,7 +1434,8 @@ PhysicalDevice::Suitable PhysicalDeviceSelector::is_device_suitable(PhysicalDevi { return PhysicalDevice::Suitable::no; } - if (criteria.require_separate_transfer_queue && !separate_transfer) { + if (criteria.require_separate_transfer_queue && !separate_transfer) + { return PhysicalDevice::Suitable::no; } if (criteria.require_present && !present_queue && !criteria.defer_surface_initialization) @@ -1201,41 
+1448,54 @@ PhysicalDevice::Suitable PhysicalDeviceSelector::is_device_suitable(PhysicalDevi if (required_extensions_supported.size() != criteria.required_extensions.size()) return PhysicalDevice::Suitable::no; - auto desired_extensions_supported = detail::check_device_extension_support(pd.available_extensions, criteria.desired_extensions); + auto desired_extensions_supported = + detail::check_device_extension_support(pd.available_extensions, criteria.desired_extensions); if (desired_extensions_supported.size() != criteria.desired_extensions.size()) suitable = PhysicalDevice::Suitable::partial; - if (!criteria.defer_surface_initialization && criteria.require_present) { + if (!criteria.defer_surface_initialization && criteria.require_present) + { std::vector formats; - std::vector present_modes; - - auto formats_ret = detail::get_vector(formats, - detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfaceFormatsKHR, - pd.physical_device, - instance_info.surface); - auto present_modes_ret = detail::get_vector(present_modes, - detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfacePresentModesKHR, - pd.physical_device, - instance_info.surface); - - if (formats_ret != VK_SUCCESS || present_modes_ret != VK_SUCCESS || formats.empty() || present_modes.empty()) { + std::vector present_modes; + + auto formats_ret = + detail::get_vector(formats, + detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfaceFormatsKHR, + pd.physical_device, + instance_info.surface); + auto present_modes_ret = detail::get_vector( + present_modes, + detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfacePresentModesKHR, + pd.physical_device, + instance_info.surface); + + if (formats_ret != VK_SUCCESS || present_modes_ret != VK_SUCCESS || formats.empty() || present_modes.empty()) + { return PhysicalDevice::Suitable::no; } } - if (!criteria.allow_any_type && pd.properties.deviceType != static_cast(criteria.preferred_type)) { + if (!criteria.allow_any_type && + pd.properties.deviceType != static_cast(criteria.preferred_type)) + { suitable = PhysicalDevice::Suitable::partial; } bool required_features_supported = detail::supports_features( pd.features, criteria.required_features, pd.extended_features_chain, criteria.extended_features_chain); - if (!required_features_supported) return PhysicalDevice::Suitable::no; + if (!required_features_supported) + return PhysicalDevice::Suitable::no; - for (uint32_t i = 0; i < pd.memory_properties.memoryHeapCount; i++) { - if (pd.memory_properties.memoryHeaps[i].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) { - if (pd.memory_properties.memoryHeaps[i].size < criteria.required_mem_size) { + for (uint32_t i = 0; i < pd.memory_properties.memoryHeapCount; i++) + { + if (pd.memory_properties.memoryHeaps[i].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) + { + if (pd.memory_properties.memoryHeaps[i].size < criteria.required_mem_size) + { return PhysicalDevice::Suitable::no; - } else if (pd.memory_properties.memoryHeaps[i].size < criteria.desired_mem_size) { + } + else if (pd.memory_properties.memoryHeaps[i].size < criteria.desired_mem_size) + { suitable = PhysicalDevice::Suitable::partial; } } @@ -1244,23 +1504,27 @@ PhysicalDevice::Suitable PhysicalDeviceSelector::is_device_suitable(PhysicalDevi return suitable; } // delegate construction to the one with an explicit surface parameter -PhysicalDeviceSelector::PhysicalDeviceSelector(Instance const& instance) - : PhysicalDeviceSelector(instance, VK_NULL_HANDLE) {} +PhysicalDeviceSelector::PhysicalDeviceSelector(Instance const& instance) : + 
PhysicalDeviceSelector(instance, VK_NULL_HANDLE) +{} -PhysicalDeviceSelector::PhysicalDeviceSelector(Instance const& instance, VkSurfaceKHR surface) { - instance_info.instance = instance.instance; - instance_info.version = instance.instance_version; +PhysicalDeviceSelector::PhysicalDeviceSelector(Instance const& instance, VkSurfaceKHR surface) +{ + instance_info.instance = instance.instance; + instance_info.version = instance.instance_version; instance_info.properties2_ext_enabled = instance.properties2_ext_enabled; - instance_info.surface = surface; - criteria.require_present = !instance.headless; - criteria.required_version = instance.api_version; - criteria.desired_version = instance.api_version; + instance_info.surface = surface; + criteria.require_present = !instance.headless; + criteria.required_version = instance.api_version; + criteria.desired_version = instance.api_version; } -std::vector PhysicalDeviceSelector::select_impl(DeviceSelectionMode selection) const { +std::vector PhysicalDeviceSelector::select_impl(DeviceSelectionMode selection) const +{ #if !defined(NDEBUG) // Validation - for (const auto& node : criteria.extended_features_chain.nodes) { + for (const auto& node : criteria.extended_features_chain.nodes) + { assert(node.sType != static_cast(0) && "Features struct sType must be filled with the struct's " "corresponding VkStructureType enum"); @@ -1270,9 +1534,10 @@ std::vector PhysicalDeviceSelector::select_impl(DeviceSelectionM } #endif - if (criteria.require_present && !criteria.defer_surface_initialization) { + if (criteria.require_present && !criteria.defer_surface_initialization) + { if (instance_info.surface == VK_NULL_HANDLE) - throw to_exception( PhysicalDeviceError::no_surface_provided); + throw to_exception(PhysicalDeviceError::no_surface_provided); } // Get the VkPhysicalDevice handles on the system @@ -1280,17 +1545,19 @@ std::vector PhysicalDeviceSelector::select_impl(DeviceSelectionM auto vk_physical_devices_ret = detail::get_vector( vk_physical_devices, detail::vulkan_functions().fp_vkEnumeratePhysicalDevices, instance_info.instance); - if (vk_physical_devices_ret != VK_SUCCESS) { + if (vk_physical_devices_ret != VK_SUCCESS) + { throw to_exception(PhysicalDeviceError::failed_enumerate_physical_devices, vk_physical_devices_ret); } - if (vk_physical_devices.size() == 0) { + if (vk_physical_devices.size() == 0) + { throw to_exception(PhysicalDeviceError::no_physical_devices_found); } auto fill_out_phys_dev_with_criteria = [&](PhysicalDevice& phys_dev) { - phys_dev.features = criteria.required_features; + phys_dev.features = criteria.required_features; phys_dev.extended_features_chain = criteria.extended_features_chain; - bool portability_ext_available = false; + bool portability_ext_available = false; for (const auto& ext : phys_dev.available_extensions) if (criteria.enable_portability_subset && ext == "VK_KHR_portability_subset") portability_ext_available = true; @@ -1299,165 +1566,207 @@ std::vector PhysicalDeviceSelector::select_impl(DeviceSelectionM detail::check_device_extension_support(phys_dev.available_extensions, criteria.desired_extensions); phys_dev.extensions_to_enable.clear(); - phys_dev.extensions_to_enable.insert( - phys_dev.extensions_to_enable.end(), criteria.required_extensions.begin(), criteria.required_extensions.end()); - phys_dev.extensions_to_enable.insert( - phys_dev.extensions_to_enable.end(), desired_extensions_supported.begin(), desired_extensions_supported.end()); - if (portability_ext_available) { + 
phys_dev.extensions_to_enable.insert(phys_dev.extensions_to_enable.end(), + criteria.required_extensions.begin(), + criteria.required_extensions.end()); + phys_dev.extensions_to_enable.insert(phys_dev.extensions_to_enable.end(), + desired_extensions_supported.begin(), + desired_extensions_supported.end()); + if (portability_ext_available) + { phys_dev.extensions_to_enable.push_back("VK_KHR_portability_subset"); } }; // if this option is set, always return only the first physical device found - if (criteria.use_first_gpu_unconditionally && vk_physical_devices.size() > 0) { - PhysicalDevice physical_device = populate_device_details(vk_physical_devices[0], criteria.extended_features_chain); + if (criteria.use_first_gpu_unconditionally && vk_physical_devices.size() > 0) + { + PhysicalDevice physical_device = + populate_device_details(vk_physical_devices[0], criteria.extended_features_chain); fill_out_phys_dev_with_criteria(physical_device); return std::vector{ physical_device }; } // Populate their details and check their suitability std::vector physical_devices; - for (auto& vk_physical_device : vk_physical_devices) { + for (auto& vk_physical_device : vk_physical_devices) + { PhysicalDevice phys_dev = populate_device_details(vk_physical_device, criteria.extended_features_chain); - phys_dev.suitable = is_device_suitable(phys_dev); - if (phys_dev.suitable != PhysicalDevice::Suitable::no) { + phys_dev.suitable = is_device_suitable(phys_dev); + if (phys_dev.suitable != PhysicalDevice::Suitable::no) + { physical_devices.push_back(phys_dev); } } // sort the list into fully and partially suitable devices. use stable_partition to maintain relative order - const auto partition_index = std::stable_partition(physical_devices.begin(), physical_devices.end(), [](auto const& pd) { - return pd.suitable == PhysicalDevice::Suitable::yes; - }); + const auto partition_index = + std::stable_partition(physical_devices.begin(), physical_devices.end(), [](auto const& pd) { + return pd.suitable == PhysicalDevice::Suitable::yes; + }); // Remove the partially suitable elements if they aren't desired - if (selection == DeviceSelectionMode::only_fully_suitable) { + if (selection == DeviceSelectionMode::only_fully_suitable) + { physical_devices.erase(partition_index, physical_devices.end()); } // Make the physical device ready to be used to create a Device from it - for (auto& physical_device : physical_devices) { + for (auto& physical_device : physical_devices) + { fill_out_phys_dev_with_criteria(physical_device); } return physical_devices; } -PhysicalDevice PhysicalDeviceSelector::select(DeviceSelectionMode selection) const { +PhysicalDevice PhysicalDeviceSelector::select(DeviceSelectionMode selection) const +{ auto const selected_devices = select_impl(selection); - if (selected_devices.size() == 0) { + if (selected_devices.size() == 0) + { throw to_exception(PhysicalDeviceError::no_suitable_device); } return selected_devices.at(0); } -// Return all devices which are considered suitable - intended for applications which want to let the user pick the physical device -std::vector PhysicalDeviceSelector::select_devices(DeviceSelectionMode selection) const { +// Return all devices which are considered suitable - intended for applications which want to let the user pick the +// physical device +std::vector PhysicalDeviceSelector::select_devices(DeviceSelectionMode selection) const +{ auto const selected_devices = select_impl(selection); - if (selected_devices.size() == 0) { + if (selected_devices.size() == 0) + { throw 
to_exception(PhysicalDeviceError::no_suitable_device); } return selected_devices; } -std::vector PhysicalDeviceSelector::select_device_names(DeviceSelectionMode selection) const { +std::vector PhysicalDeviceSelector::select_device_names(DeviceSelectionMode selection) const +{ auto const selected_devices = select_impl(selection); - if (selected_devices.size() == 0) { + if (selected_devices.size() == 0) + { throw to_exception(PhysicalDeviceError::no_suitable_device); } std::vector names; - for (const auto& pd : selected_devices) { + for (const auto& pd : selected_devices) + { names.push_back(pd.name); } return names; } -PhysicalDeviceSelector& PhysicalDeviceSelector::set_surface(VkSurfaceKHR surface) { +PhysicalDeviceSelector& PhysicalDeviceSelector::set_surface(VkSurfaceKHR surface) +{ instance_info.surface = surface; return *this; } -PhysicalDeviceSelector& PhysicalDeviceSelector::set_name(std::string const& name) { +PhysicalDeviceSelector& PhysicalDeviceSelector::set_name(std::string const& name) +{ criteria.name = name; return *this; } -PhysicalDeviceSelector& PhysicalDeviceSelector::prefer_gpu_device_type(PreferredDeviceType type) { +PhysicalDeviceSelector& PhysicalDeviceSelector::prefer_gpu_device_type(PreferredDeviceType type) +{ criteria.preferred_type = type; return *this; } -PhysicalDeviceSelector& PhysicalDeviceSelector::allow_any_gpu_device_type(bool allow_any_type) { +PhysicalDeviceSelector& PhysicalDeviceSelector::allow_any_gpu_device_type(bool allow_any_type) +{ criteria.allow_any_type = allow_any_type; return *this; } -PhysicalDeviceSelector& PhysicalDeviceSelector::require_present(bool require) { +PhysicalDeviceSelector& PhysicalDeviceSelector::require_present(bool require) +{ criteria.require_present = require; return *this; } -PhysicalDeviceSelector& PhysicalDeviceSelector::require_dedicated_transfer_queue() { +PhysicalDeviceSelector& PhysicalDeviceSelector::require_dedicated_transfer_queue() +{ criteria.require_dedicated_transfer_queue = true; return *this; } -PhysicalDeviceSelector& PhysicalDeviceSelector::require_dedicated_compute_queue() { +PhysicalDeviceSelector& PhysicalDeviceSelector::require_dedicated_compute_queue() +{ criteria.require_dedicated_compute_queue = true; return *this; } -PhysicalDeviceSelector& PhysicalDeviceSelector::require_separate_transfer_queue() { +PhysicalDeviceSelector& PhysicalDeviceSelector::require_separate_transfer_queue() +{ criteria.require_separate_transfer_queue = true; return *this; } -PhysicalDeviceSelector& PhysicalDeviceSelector::require_separate_compute_queue() { +PhysicalDeviceSelector& PhysicalDeviceSelector::require_separate_compute_queue() +{ criteria.require_separate_compute_queue = true; return *this; } -PhysicalDeviceSelector& PhysicalDeviceSelector::required_device_memory_size(VkDeviceSize size) { +PhysicalDeviceSelector& PhysicalDeviceSelector::required_device_memory_size(VkDeviceSize size) +{ criteria.required_mem_size = size; return *this; } -PhysicalDeviceSelector& PhysicalDeviceSelector::desired_device_memory_size(VkDeviceSize size) { +PhysicalDeviceSelector& PhysicalDeviceSelector::desired_device_memory_size(VkDeviceSize size) +{ criteria.desired_mem_size = size; return *this; } -PhysicalDeviceSelector& PhysicalDeviceSelector::add_required_extension(const char* extension) { +PhysicalDeviceSelector& PhysicalDeviceSelector::add_required_extension(const char* extension) +{ criteria.required_extensions.push_back(extension); return *this; } -PhysicalDeviceSelector& 
PhysicalDeviceSelector::add_required_extensions(std::vector const& extensions) { - for (const auto& ext : extensions) { +PhysicalDeviceSelector& PhysicalDeviceSelector::add_required_extensions(std::vector const& extensions) +{ + for (const auto& ext : extensions) + { criteria.required_extensions.push_back(ext); } return *this; } -PhysicalDeviceSelector& PhysicalDeviceSelector::add_required_extensions(size_t count, const char* const* extensions) { - if (!extensions || count == 0) return *this; - for (size_t i = 0; i < count; i++) { +PhysicalDeviceSelector& PhysicalDeviceSelector::add_required_extensions(size_t count, const char* const* extensions) +{ + if (!extensions || count == 0) + return *this; + for (size_t i = 0; i < count; i++) + { criteria.required_extensions.push_back(extensions[i]); } return *this; } -PhysicalDeviceSelector& PhysicalDeviceSelector::add_desired_extension(const char* extension) { +PhysicalDeviceSelector& PhysicalDeviceSelector::add_desired_extension(const char* extension) +{ criteria.desired_extensions.push_back(extension); return *this; } -PhysicalDeviceSelector& PhysicalDeviceSelector::add_desired_extensions(const std::vector& extensions) { - for (const auto& ext : extensions) { +PhysicalDeviceSelector& PhysicalDeviceSelector::add_desired_extensions(const std::vector& extensions) +{ + for (const auto& ext : extensions) + { criteria.desired_extensions.push_back(ext); } return *this; } -PhysicalDeviceSelector& PhysicalDeviceSelector::set_minimum_version(uint32_t major, uint32_t minor) { +PhysicalDeviceSelector& PhysicalDeviceSelector::set_minimum_version(uint32_t major, uint32_t minor) +{ criteria.required_version = VKB_MAKE_VK_VERSION(0, major, minor, 0); return *this; } -PhysicalDeviceSelector& PhysicalDeviceSelector::set_desired_version(uint32_t major, uint32_t minor) { +PhysicalDeviceSelector& PhysicalDeviceSelector::set_desired_version(uint32_t major, uint32_t minor) +{ criteria.desired_version = VKB_MAKE_VK_VERSION(0, major, minor, 0); return *this; } -PhysicalDeviceSelector& PhysicalDeviceSelector::disable_portability_subset() { +PhysicalDeviceSelector& PhysicalDeviceSelector::disable_portability_subset() +{ criteria.enable_portability_subset = false; return *this; } -PhysicalDeviceSelector& PhysicalDeviceSelector::set_required_features(VkPhysicalDeviceFeatures const& features) { +PhysicalDeviceSelector& PhysicalDeviceSelector::set_required_features(VkPhysicalDeviceFeatures const& features) +{ detail::combine_features(criteria.required_features, features); return *this; } @@ -1465,98 +1774,129 @@ PhysicalDeviceSelector& PhysicalDeviceSelector::set_required_features(VkPhysical // The implementation of the set_required_features_1X functions sets the sType manually. This was a poor choice since // users of Vulkan should expect to fill out their structs properly. To make the functions take the struct parameter by // const reference, a local copy must be made in order to set the sType. 
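// Illustrative use of the selector API above (a sketch, not taken from this patch): the
// `instance` and `surface` variables and the chosen criteria are assumptions for the example.
//
//     PhysicalDeviceSelector selector(instance, surface);          // instance/surface built elsewhere
//     PhysicalDevice gpu = selector.set_minimum_version(1, 1)
//                              .require_present(true)
//                              .require_dedicated_transfer_queue()
//                              .select(DeviceSelectionMode::only_fully_suitable); // throws if none found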
-PhysicalDeviceSelector& PhysicalDeviceSelector::set_required_features_11(VkPhysicalDeviceVulkan11Features const& features_11) { +PhysicalDeviceSelector& +PhysicalDeviceSelector::set_required_features_11(VkPhysicalDeviceVulkan11Features const& features_11) +{ VkPhysicalDeviceVulkan11Features features_11_copy = features_11; - features_11_copy.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES; + features_11_copy.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES; add_required_extension_features(features_11_copy); return *this; } -PhysicalDeviceSelector& PhysicalDeviceSelector::set_required_features_12(VkPhysicalDeviceVulkan12Features const& features_12) { +PhysicalDeviceSelector& +PhysicalDeviceSelector::set_required_features_12(VkPhysicalDeviceVulkan12Features const& features_12) +{ VkPhysicalDeviceVulkan12Features features_12_copy = features_12; - features_12_copy.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES; + features_12_copy.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES; add_required_extension_features(features_12_copy); return *this; } #endif #if defined(VKB_VK_API_VERSION_1_3) -PhysicalDeviceSelector& PhysicalDeviceSelector::set_required_features_13(VkPhysicalDeviceVulkan13Features const& features_13) { +PhysicalDeviceSelector& +PhysicalDeviceSelector::set_required_features_13(VkPhysicalDeviceVulkan13Features const& features_13) +{ VkPhysicalDeviceVulkan13Features features_13_copy = features_13; - features_13_copy.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES; + features_13_copy.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES; add_required_extension_features(features_13_copy); return *this; } #endif -PhysicalDeviceSelector& PhysicalDeviceSelector::defer_surface_initialization() { +PhysicalDeviceSelector& PhysicalDeviceSelector::defer_surface_initialization() +{ criteria.defer_surface_initialization = true; return *this; } -PhysicalDeviceSelector& PhysicalDeviceSelector::select_first_device_unconditionally(bool unconditionally) { +PhysicalDeviceSelector& PhysicalDeviceSelector::select_first_device_unconditionally(bool unconditionally) +{ criteria.use_first_gpu_unconditionally = unconditionally; return *this; } // PhysicalDevice -bool PhysicalDevice::has_dedicated_compute_queue() const { +bool PhysicalDevice::has_dedicated_compute_queue() const +{ return detail::get_dedicated_queue_index(queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT).has_value(); } -bool PhysicalDevice::has_separate_compute_queue() const { +bool PhysicalDevice::has_separate_compute_queue() const +{ return detail::get_separate_queue_index(queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT).has_value(); } -bool PhysicalDevice::has_dedicated_transfer_queue() const { +bool PhysicalDevice::has_dedicated_transfer_queue() const +{ return detail::get_dedicated_queue_index(queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT).has_value(); } -bool PhysicalDevice::has_separate_transfer_queue() const { +bool PhysicalDevice::has_separate_transfer_queue() const +{ return detail::get_separate_queue_index(queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT).has_value(); } -std::vector PhysicalDevice::get_queue_families() const { return queue_families; } -std::vector PhysicalDevice::get_extensions() const { return extensions_to_enable; } -std::vector PhysicalDevice::get_available_extensions() const { return available_extensions; } -bool PhysicalDevice::is_extension_present(const char* ext) const { - return 
std::find_if(std::begin(available_extensions), std::end(available_extensions), [ext](std::string const& ext_name) { - return ext_name == ext; - }) != std::end(available_extensions); +std::vector PhysicalDevice::get_queue_families() const +{ + return queue_families; +} +std::vector PhysicalDevice::get_extensions() const +{ + return extensions_to_enable; +} +std::vector PhysicalDevice::get_available_extensions() const +{ + return available_extensions; } -bool PhysicalDevice::enable_extension_if_present(const char* extension) { +bool PhysicalDevice::is_extension_present(const char* ext) const +{ + return std::find_if(std::begin(available_extensions), + std::end(available_extensions), + [ext](std::string const& ext_name) { return ext_name == ext; }) != + std::end(available_extensions); +} +bool PhysicalDevice::enable_extension_if_present(const char* extension) +{ auto it = std::find_if(std::begin(available_extensions), std::end(available_extensions), [extension](std::string const& ext_name) { return ext_name == extension; }); - if (it != std::end(available_extensions)) { + if (it != std::end(available_extensions)) + { extensions_to_enable.push_back(extension); return true; } return false; } -bool PhysicalDevice::enable_extensions_if_present(const std::vector& extensions) { - for (const auto extension : extensions) { +bool PhysicalDevice::enable_extensions_if_present(const std::vector& extensions) +{ + for (const auto extension : extensions) + { auto it = std::find_if(std::begin(available_extensions), std::end(available_extensions), [extension](std::string const& ext_name) { return ext_name == extension; }); - if (it == std::end(available_extensions)) return false; + if (it == std::end(available_extensions)) + return false; } - for (const auto extension : extensions) - extensions_to_enable.push_back(extension); + for (const auto extension : extensions) extensions_to_enable.push_back(extension); return true; } -bool PhysicalDevice::enable_features_if_present(const VkPhysicalDeviceFeatures& features_to_enable) { +bool PhysicalDevice::enable_features_if_present(const VkPhysicalDeviceFeatures& features_to_enable) +{ VkPhysicalDeviceFeatures actual_pdf{}; detail::vulkan_functions().fp_vkGetPhysicalDeviceFeatures(physical_device, &actual_pdf); bool required_features_supported = detail::supports_features(actual_pdf, features_to_enable, {}, {}); - if (required_features_supported) { + if (required_features_supported) + { detail::combine_features(features, features_to_enable); } return required_features_supported; } -bool PhysicalDevice::is_features_node_present(detail::GenericFeaturesPNextNode const& node) const { +bool PhysicalDevice::is_features_node_present(detail::GenericFeaturesPNextNode const& node) const +{ detail::GenericFeatureChain requested_features; requested_features.nodes.push_back(node); return extended_features_chain.find_and_match(requested_features); } -bool PhysicalDevice::enable_features_node_if_present(detail::GenericFeaturesPNextNode const& node) { +bool PhysicalDevice::enable_features_node_if_present(detail::GenericFeaturesPNextNode const& node) +{ VkPhysicalDeviceFeatures2 actual_pdf2{}; detail::GenericFeatureChain requested_features; @@ -1564,34 +1904,45 @@ bool PhysicalDevice::enable_features_node_if_present(detail::GenericFeaturesPNex detail::GenericFeatureChain fill_chain = requested_features; // Zero out supported features - memset(fill_chain.nodes.front().fields, UINT8_MAX, sizeof(VkBool32) * detail::GenericFeaturesPNextNode::field_capacity); + 
memset(fill_chain.nodes.front().fields, + UINT8_MAX, + sizeof(VkBool32) * detail::GenericFeaturesPNextNode::field_capacity); actual_pdf2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2; fill_chain.chain_up(actual_pdf2); bool required_features_supported = false; - bool instance_is_1_1 = instance_version >= VKB_VK_API_VERSION_1_1; - if (instance_is_1_1 || properties2_ext_enabled) { - if (instance_is_1_1) { + bool instance_is_1_1 = instance_version >= VKB_VK_API_VERSION_1_1; + if (instance_is_1_1 || properties2_ext_enabled) + { + if (instance_is_1_1) + { detail::vulkan_functions().fp_vkGetPhysicalDeviceFeatures2(physical_device, &actual_pdf2); - } else { + } + else + { detail::vulkan_functions().fp_vkGetPhysicalDeviceFeatures2KHR(physical_device, &actual_pdf2); } required_features_supported = fill_chain.match_all(requested_features); - if (required_features_supported) { + if (required_features_supported) + { extended_features_chain.combine(requested_features); } } return required_features_supported; } - -PhysicalDevice::operator VkPhysicalDevice() const { return this->physical_device; } +PhysicalDevice::operator VkPhysicalDevice() const +{ + return this->physical_device; +} // ---- Queues ---- // -std::optional Device::get_queue_index(QueueType type) const { - switch (type) { +std::optional Device::get_queue_index(QueueType type) const +{ + switch (type) + { case QueueType::present: return detail::get_present_queue_index(physical_device.physical_device, surface, queue_families); break; @@ -1606,8 +1957,10 @@ std::optional Device::get_queue_index(QueueType type) const { } } -std::optional Device::get_dedicated_queue_index(QueueType type) const { - switch (type) { +std::optional Device::get_dedicated_queue_index(QueueType type) const +{ + switch (type) + { case QueueType::compute: return detail::get_dedicated_queue_index(queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT); case QueueType::transfer: @@ -1617,17 +1970,21 @@ std::optional Device::get_dedicated_queue_index(QueueType type) const } } -std::optional Device::get_queue(QueueType type) const { +std::optional Device::get_queue(QueueType type) const +{ auto index = get_queue_index(type); - if (!index.has_value()) return {}; + if (!index.has_value()) + return {}; VkQueue out_queue; internal_table.fp_vkGetDeviceQueue(device, *index, 0, &out_queue); return out_queue; } -std::optional Device::get_dedicated_queue(QueueType type) const { +std::optional Device::get_dedicated_queue(QueueType type) const +{ auto index = get_dedicated_queue_index(type); - if (!index.has_value()) return {}; + if (!index.has_value()) + return {}; VkQueue out_queue; internal_table.fp_vkGetDeviceQueue(device, *index, 0, &out_queue); return out_queue; @@ -1635,123 +1992,156 @@ std::optional Device::get_dedicated_queue(QueueType type) const { // ---- Dispatch ---- // -vkb::DispatchTable Device::make_table() const { return { device, fp_vkGetDeviceProcAddr }; } +vkb::DispatchTable Device::make_table() const +{ + return { device, fp_vkGetDeviceProcAddr }; +} // ---- Device ---- // -Device::operator VkDevice() const { return this->device; } +Device::operator VkDevice() const +{ + return this->device; +} -CustomQueueDescription::CustomQueueDescription(uint32_t index, std::vector priorities) - : index(index), priorities(std::move(priorities)) {} +CustomQueueDescription::CustomQueueDescription(uint32_t index, std::vector priorities) : + index(index), priorities(std::move(priorities)) +{} -void destroy_device(Device const& device) { +void destroy_device(Device 
const& device) +{ device.internal_table.fp_vkDestroyDevice(device.device, device.allocation_callbacks); } -DeviceBuilder::DeviceBuilder(PhysicalDevice phys_device) { physical_device = std::move(phys_device); } +DeviceBuilder::DeviceBuilder(PhysicalDevice phys_device) +{ + physical_device = std::move(phys_device); +} -Device DeviceBuilder::build() const { +Device DeviceBuilder::build() const +{ std::vector queue_descriptions; queue_descriptions.insert(queue_descriptions.end(), info.queue_descriptions.begin(), info.queue_descriptions.end()); - if (queue_descriptions.empty()) { - for (uint32_t i = 0; i < physical_device.queue_families.size(); i++) { + if (queue_descriptions.empty()) + { + for (uint32_t i = 0; i < physical_device.queue_families.size(); i++) + { queue_descriptions.emplace_back(i, std::vector{ 1.0f }); } } std::vector queueCreateInfos; - for (auto& desc : queue_descriptions) { + for (auto& desc : queue_descriptions) + { VkDeviceQueueCreateInfo queue_create_info = {}; - queue_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; - queue_create_info.queueFamilyIndex = desc.index; - queue_create_info.queueCount = static_cast(desc.priorities.size()); - queue_create_info.pQueuePriorities = desc.priorities.data(); + queue_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; + queue_create_info.queueFamilyIndex = desc.index; + queue_create_info.queueCount = static_cast(desc.priorities.size()); + queue_create_info.pQueuePriorities = desc.priorities.data(); queueCreateInfos.push_back(queue_create_info); } std::vector extensions_to_enable; - for (const auto& ext : physical_device.extensions_to_enable) { + for (const auto& ext : physical_device.extensions_to_enable) + { extensions_to_enable.push_back(ext.c_str()); } if (physical_device.surface != VK_NULL_HANDLE || physical_device.defer_surface_initialization) extensions_to_enable.push_back({ VK_KHR_SWAPCHAIN_EXTENSION_NAME }); std::vector final_pnext_chain; - VkDeviceCreateInfo device_create_info = {}; + VkDeviceCreateInfo device_create_info = {}; bool user_defined_phys_dev_features_2 = false; - for (auto& pnext : info.pNext_chain) { - if (pnext->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2) { + for (auto& pnext : info.pNext_chain) + { + if (pnext->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2) + { user_defined_phys_dev_features_2 = true; break; } } - if (user_defined_phys_dev_features_2 && !physical_device.extended_features_chain.nodes.empty()) { - throw to_exception(DeviceError::VkPhysicalDeviceFeatures2_in_pNext_chain_while_using_add_required_extension_features); + if (user_defined_phys_dev_features_2 && !physical_device.extended_features_chain.nodes.empty()) + { + throw to_exception( + DeviceError::VkPhysicalDeviceFeatures2_in_pNext_chain_while_using_add_required_extension_features); } // These objects must be alive during the call to vkCreateDevice - auto physical_device_extension_features_copy = physical_device.extended_features_chain; + auto physical_device_extension_features_copy = physical_device.extended_features_chain; VkPhysicalDeviceFeatures2 local_features2{}; - local_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2; + local_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2; local_features2.features = physical_device.features; - if (!user_defined_phys_dev_features_2) { - if (physical_device.instance_version >= VKB_VK_API_VERSION_1_1 || physical_device.properties2_ext_enabled) { + if (!user_defined_phys_dev_features_2) + { + if (physical_device.instance_version 
>= VKB_VK_API_VERSION_1_1 || physical_device.properties2_ext_enabled) + { final_pnext_chain.push_back(reinterpret_cast(&local_features2)); - for (auto& features_node : physical_device_extension_features_copy.nodes) { + for (auto& features_node : physical_device_extension_features_copy.nodes) + { final_pnext_chain.push_back(reinterpret_cast(&features_node)); } - } else { - // Only set device_create_info.pEnabledFeatures when the pNext chain does not contain a VkPhysicalDeviceFeatures2 structure + } + else + { + // Only set device_create_info.pEnabledFeatures when the pNext chain does not contain a + // VkPhysicalDeviceFeatures2 structure device_create_info.pEnabledFeatures = &physical_device.features; } } - for (auto& pnext : info.pNext_chain) { + for (auto& pnext : info.pNext_chain) + { final_pnext_chain.push_back(pnext); } detail::setup_pNext_chain(device_create_info, final_pnext_chain); #if !defined(NDEBUG) - for (auto& node : final_pnext_chain) { + for (auto& node : final_pnext_chain) + { assert(node->sType != VK_STRUCTURE_TYPE_APPLICATION_INFO); } #endif - device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO; - device_create_info.flags = info.flags; - device_create_info.queueCreateInfoCount = static_cast(queueCreateInfos.size()); - device_create_info.pQueueCreateInfos = queueCreateInfos.data(); - device_create_info.enabledExtensionCount = static_cast(extensions_to_enable.size()); + device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO; + device_create_info.flags = info.flags; + device_create_info.queueCreateInfoCount = static_cast(queueCreateInfos.size()); + device_create_info.pQueueCreateInfos = queueCreateInfos.data(); + device_create_info.enabledExtensionCount = static_cast(extensions_to_enable.size()); device_create_info.ppEnabledExtensionNames = extensions_to_enable.data(); Device device; VkResult res = detail::vulkan_functions().fp_vkCreateDevice( physical_device.physical_device, &device_create_info, info.allocation_callbacks, &device.device); - if (res != VK_SUCCESS) { + if (res != VK_SUCCESS) + { throw to_exception(DeviceError::failed_create_device, res); } - device.physical_device = physical_device; - device.surface = physical_device.surface; - device.queue_families = physical_device.queue_families; - device.allocation_callbacks = info.allocation_callbacks; + device.physical_device = physical_device; + device.surface = physical_device.surface; + device.queue_families = physical_device.queue_families; + device.allocation_callbacks = info.allocation_callbacks; device.fp_vkGetDeviceProcAddr = detail::vulkan_functions().fp_vkGetDeviceProcAddr; - detail::vulkan_functions().get_device_proc_addr(device.device, device.internal_table.fp_vkGetDeviceQueue, "vkGetDeviceQueue"); - detail::vulkan_functions().get_device_proc_addr(device.device, device.internal_table.fp_vkDestroyDevice, "vkDestroyDevice"); + detail::vulkan_functions().get_device_proc_addr( + device.device, device.internal_table.fp_vkGetDeviceQueue, "vkGetDeviceQueue"); + detail::vulkan_functions().get_device_proc_addr( + device.device, device.internal_table.fp_vkDestroyDevice, "vkDestroyDevice"); device.instance_version = physical_device.instance_version; return device; } -DeviceBuilder& DeviceBuilder::custom_queue_setup(std::vector queue_descriptions) { +DeviceBuilder& DeviceBuilder::custom_queue_setup(std::vector queue_descriptions) +{ info.queue_descriptions = std::move(queue_descriptions); return *this; } -DeviceBuilder& DeviceBuilder::set_allocation_callbacks(VkAllocationCallbacks* callbacks) { 
+DeviceBuilder& DeviceBuilder::set_allocation_callbacks(VkAllocationCallbacks* callbacks) +{ info.allocation_callbacks = callbacks; return *this; } @@ -1760,13 +2150,15 @@ DeviceBuilder& DeviceBuilder::set_allocation_callbacks(VkAllocationCallbacks* ca GFXRECON_BEGIN_NAMESPACE(detail) -struct SurfaceSupportDetails { - VkSurfaceCapabilitiesKHR capabilities; +struct SurfaceSupportDetails +{ + VkSurfaceCapabilitiesKHR capabilities; std::vector formats; - std::vector present_modes; + std::vector present_modes; }; -enum class SurfaceSupportError { +enum class SurfaceSupportError +{ surface_handle_null, failed_get_surface_capabilities, failed_enumerate_surface_formats, @@ -1788,10 +2180,12 @@ const char* to_string(SurfaceSupportError err) } } -std::exception to_exception(SurfaceSupportError error) { +std::exception to_exception(SurfaceSupportError error) +{ return std::runtime_error(to_string(error)); } -std::exception to_exception(SurfaceSupportError error, VkResult result) { +std::exception to_exception(SurfaceSupportError error, VkResult result) +{ std::string message{}; message.append(to_string(error)); message.append(": "); @@ -1799,17 +2193,21 @@ std::exception to_exception(SurfaceSupportError error, VkResult result) { return std::runtime_error(message); } -SurfaceSupportDetails query_surface_support_details(VkPhysicalDevice phys_device, VkSurfaceKHR surface) { - if (surface == VK_NULL_HANDLE) throw to_exception(SurfaceSupportError::surface_handle_null); +SurfaceSupportDetails query_surface_support_details(VkPhysicalDevice phys_device, VkSurfaceKHR surface) +{ + if (surface == VK_NULL_HANDLE) + throw to_exception(SurfaceSupportError::surface_handle_null); VkSurfaceCapabilitiesKHR capabilities; - VkResult res = detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR(phys_device, surface, &capabilities); - if (res != VK_SUCCESS) { + VkResult res = + detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR(phys_device, surface, &capabilities); + if (res != VK_SUCCESS) + { throw to_exception(SurfaceSupportError::failed_get_surface_capabilities, res); } std::vector formats; - std::vector present_modes; + std::vector present_modes; auto formats_ret = detail::get_vector( formats, detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfaceFormatsKHR, phys_device, surface); @@ -1823,12 +2221,17 @@ SurfaceSupportDetails query_surface_support_details(VkPhysicalDevice phys_device return SurfaceSupportDetails{ capabilities, formats, present_modes }; } -std::optional find_desired_surface_format( - std::vector const& available_formats, std::vector const& desired_formats) { - for (auto const& desired_format : desired_formats) { - for (auto const& available_format : available_formats) { +std::optional find_desired_surface_format(std::vector const& available_formats, + std::vector const& desired_formats) +{ + for (auto const& desired_format : desired_formats) + { + for (auto const& available_format : available_formats) + { // finds the first format that is desired and available - if (desired_format.format == available_format.format && desired_format.colorSpace == available_format.colorSpace) { + if (desired_format.format == available_format.format && + desired_format.colorSpace == available_format.colorSpace) + { return desired_format; } } @@ -1837,37 +2240,54 @@ std::optional find_desired_surface_format( return {}; } -VkSurfaceFormatKHR find_best_surface_format( - std::vector const& available_formats, std::vector const& desired_formats) { +VkSurfaceFormatKHR 
find_best_surface_format(std::vector const& available_formats, + std::vector const& desired_formats) +{ auto surface_format_ret = detail::find_desired_surface_format(available_formats, desired_formats); return surface_format_ret.value_or(available_formats[0]); } VkPresentModeKHR find_present_mode(std::vector const& available_resent_modes, - std::vector const& desired_present_modes) { - for (auto const& desired_pm : desired_present_modes) { - for (auto const& available_pm : available_resent_modes) { + std::vector const& desired_present_modes) +{ + for (auto const& desired_pm : desired_present_modes) + { + for (auto const& available_pm : available_resent_modes) + { // finds the first present mode that is desired and available - if (desired_pm == available_pm) return desired_pm; + if (desired_pm == available_pm) + return desired_pm; } } // only present mode required, use as a fallback return VK_PRESENT_MODE_FIFO_KHR; } -template T minimum(T a, T b) { return a < b ? a : b; } -template T maximum(T a, T b) { return a > b ? a : b; } +template +T minimum(T a, T b) +{ + return a < b ? a : b; +} +template +T maximum(T a, T b) +{ + return a > b ? a : b; +} -VkExtent2D find_extent(VkSurfaceCapabilitiesKHR const& capabilities, uint32_t desired_width, uint32_t desired_height) { - if (capabilities.currentExtent.width != UINT32_MAX) { +VkExtent2D find_extent(VkSurfaceCapabilitiesKHR const& capabilities, uint32_t desired_width, uint32_t desired_height) +{ + if (capabilities.currentExtent.width != UINT32_MAX) + { return capabilities.currentExtent; - } else { + } + else + { VkExtent2D actualExtent = { desired_width, desired_height }; actualExtent.width = maximum(capabilities.minImageExtent.width, minimum(capabilities.maxImageExtent.width, actualExtent.width)); - actualExtent.height = - maximum(capabilities.minImageExtent.height, minimum(capabilities.maxImageExtent.height, actualExtent.height)); + actualExtent.height = maximum(capabilities.minImageExtent.height, + minimum(capabilities.maxImageExtent.height, actualExtent.height)); return actualExtent; } @@ -1875,50 +2295,57 @@ VkExtent2D find_extent(VkSurfaceCapabilitiesKHR const& capabilities, uint32_t de GFXRECON_END_NAMESPACE(detail) -void destroy_swapchain(Swapchain const& swapchain) { - if (swapchain.device != VK_NULL_HANDLE && swapchain.swapchain != VK_NULL_HANDLE) { - swapchain.internal_table.fp_vkDestroySwapchainKHR(swapchain.device, swapchain.swapchain, swapchain.allocation_callbacks); +void destroy_swapchain(Swapchain const& swapchain) +{ + if (swapchain.device != VK_NULL_HANDLE && swapchain.swapchain != VK_NULL_HANDLE) + { + swapchain.internal_table.fp_vkDestroySwapchainKHR( + swapchain.device, swapchain.swapchain, swapchain.allocation_callbacks); } } -SwapchainBuilder::SwapchainBuilder(Device const& device) { - info.physical_device = device.physical_device.physical_device; - info.device = device.device; - info.surface = device.surface; +SwapchainBuilder::SwapchainBuilder(Device const& device) +{ + info.physical_device = device.physical_device.physical_device; + info.device = device.device; + info.surface = device.surface; info.instance_version = device.instance_version; - auto present = device.get_queue_index(QueueType::present); - auto graphics = device.get_queue_index(QueueType::graphics); + auto present = device.get_queue_index(QueueType::present); + auto graphics = device.get_queue_index(QueueType::graphics); assert(graphics.has_value() && present.has_value() && "Graphics and Present queue indexes must be valid"); info.graphics_queue_index = 
*present; - info.present_queue_index = *graphics; + info.present_queue_index = *graphics; info.allocation_callbacks = device.allocation_callbacks; } -SwapchainBuilder::SwapchainBuilder(Device const& device, VkSurfaceKHR const surface) { - info.physical_device = device.physical_device.physical_device; - info.device = device.device; - info.surface = surface; +SwapchainBuilder::SwapchainBuilder(Device const& device, VkSurfaceKHR const surface) +{ + info.physical_device = device.physical_device.physical_device; + info.device = device.device; + info.surface = surface; info.instance_version = device.instance_version; - Device temp_device = device; - temp_device.surface = surface; - auto present = temp_device.get_queue_index(QueueType::present); - auto graphics = temp_device.get_queue_index(QueueType::graphics); + Device temp_device = device; + temp_device.surface = surface; + auto present = temp_device.get_queue_index(QueueType::present); + auto graphics = temp_device.get_queue_index(QueueType::graphics); assert(graphics.has_value() && present.has_value() && "Graphics and Present queue indexes must be valid"); info.graphics_queue_index = *graphics; - info.present_queue_index = *present; + info.present_queue_index = *present; info.allocation_callbacks = device.allocation_callbacks; } -SwapchainBuilder::SwapchainBuilder(VkPhysicalDevice const physical_device, - VkDevice const device, - VkSurfaceKHR const surface, +SwapchainBuilder::SwapchainBuilder(VkPhysicalDevice const physical_device, + VkDevice const device, + VkSurfaceKHR const surface, std::optional graphics_queue_index, - std::optional present_queue_index) { - info.physical_device = physical_device; - info.device = device; - info.surface = surface; + std::optional present_queue_index) +{ + info.physical_device = physical_device; + info.device = device; + info.surface = surface; info.graphics_queue_index = graphics_queue_index; - info.present_queue_index = present_queue_index; - if (!graphics_queue_index.has_value() || !present_queue_index.has_value()) { + info.present_queue_index = present_queue_index; + if (!graphics_queue_index.has_value() || !present_queue_index.has_value()) + { auto queue_families = detail::get_vector_noerror( detail::vulkan_functions().fp_vkGetPhysicalDeviceQueueFamilyProperties, physical_device); if (!graphics_queue_index.has_value()) @@ -1927,33 +2354,44 @@ SwapchainBuilder::SwapchainBuilder(VkPhysicalDevice const physical_device, info.present_queue_index = detail::get_present_queue_index(physical_device, surface, queue_families); } } -Swapchain SwapchainBuilder::build() const { - if (info.surface == VK_NULL_HANDLE) { +Swapchain SwapchainBuilder::build() const +{ + if (info.surface == VK_NULL_HANDLE) + { throw to_exception(SwapchainError::surface_handle_not_provided); } auto desired_formats = info.desired_formats; - if (desired_formats.size() == 0) add_desired_formats(desired_formats); + if (desired_formats.size() == 0) + add_desired_formats(desired_formats); auto desired_present_modes = info.desired_present_modes; - if (desired_present_modes.size() == 0) add_desired_present_modes(desired_present_modes); + if (desired_present_modes.size() == 0) + add_desired_present_modes(desired_present_modes); auto surface_support = detail::query_surface_support_details(info.physical_device, info.surface); uint32_t image_count = info.min_image_count; - if (info.required_min_image_count >= 1) { + if (info.required_min_image_count >= 1) + { if (info.required_min_image_count < surface_support.capabilities.minImageCount) throw 
to_exception(SwapchainError::required_min_image_count_too_low); image_count = info.required_min_image_count; - } else if (info.min_image_count == 0) { - // We intentionally use minImageCount + 1 to maintain existing behavior, even if it typically results in triple buffering on most systems. + } + else if (info.min_image_count == 0) + { + // We intentionally use minImageCount + 1 to maintain existing behavior, even if it typically results in triple + // buffering on most systems. image_count = surface_support.capabilities.minImageCount + 1; - } else { + } + else + { image_count = info.min_image_count; if (image_count < surface_support.capabilities.minImageCount) image_count = surface_support.capabilities.minImageCount; } - if (surface_support.capabilities.maxImageCount > 0 && image_count > surface_support.capabilities.maxImageCount) { + if (surface_support.capabilities.maxImageCount > 0 && image_count > surface_support.capabilities.maxImageCount) + { image_count = surface_support.capabilities.maxImageCount; } @@ -1964,20 +2402,23 @@ Swapchain SwapchainBuilder::build() const { uint32_t image_array_layers = info.array_layer_count; if (surface_support.capabilities.maxImageArrayLayers < info.array_layer_count) image_array_layers = surface_support.capabilities.maxImageArrayLayers; - if (info.array_layer_count == 0) image_array_layers = 1; + if (info.array_layer_count == 0) + image_array_layers = 1; uint32_t queue_family_indices[] = { *info.graphics_queue_index, *info.present_queue_index }; VkPresentModeKHR present_mode = detail::find_present_mode(surface_support.present_modes, desired_present_modes); - // VkSurfaceCapabilitiesKHR::supportedUsageFlags is only only valid for some present modes. For shared present modes, we should also check VkSharedPresentSurfaceCapabilitiesKHR::sharedPresentSupportedUsageFlags. + // VkSurfaceCapabilitiesKHR::supportedUsageFlags is only only valid for some present modes. For shared present + // modes, we should also check VkSharedPresentSurfaceCapabilitiesKHR::sharedPresentSupportedUsageFlags. 
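// For reference, the same support information consulted below can be queried directly through the
// internal helpers defined earlier in this file (illustrative only; the 1280x720 extent is an
// assumption made for the sketch):
//
//     auto support = detail::query_surface_support_details(info.physical_device, info.surface);
//     VkPresentModeKHR mode = detail::find_present_mode(support.present_modes,
//                                                       { VK_PRESENT_MODE_MAILBOX_KHR });
//     VkExtent2D extent     = detail::find_extent(support.capabilities, 1280, 720);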
auto is_unextended_present_mode = [](VkPresentModeKHR present_mode) { return (present_mode == VK_PRESENT_MODE_IMMEDIATE_KHR) || (present_mode == VK_PRESENT_MODE_MAILBOX_KHR) || (present_mode == VK_PRESENT_MODE_FIFO_KHR) || (present_mode == VK_PRESENT_MODE_FIFO_RELAXED_KHR); }; if (is_unextended_present_mode(present_mode) && - (info.image_usage_flags & surface_support.capabilities.supportedUsageFlags) != info.image_usage_flags) { + (info.image_usage_flags & surface_support.capabilities.supportedUsageFlags) != info.image_usage_flags) + { throw to_exception(SwapchainError::required_usage_not_supported); } @@ -1986,79 +2427,95 @@ Swapchain SwapchainBuilder::build() const { pre_transform = surface_support.capabilities.currentTransform; VkSwapchainCreateInfoKHR swapchain_create_info = {}; - swapchain_create_info.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR; + swapchain_create_info.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR; detail::setup_pNext_chain(swapchain_create_info, info.pNext_chain); #if !defined(NDEBUG) - for (auto& node : info.pNext_chain) { + for (auto& node : info.pNext_chain) + { assert(node->sType != VK_STRUCTURE_TYPE_APPLICATION_INFO); } #endif - swapchain_create_info.flags = info.create_flags; - swapchain_create_info.surface = info.surface; - swapchain_create_info.minImageCount = image_count; - swapchain_create_info.imageFormat = surface_format.format; - swapchain_create_info.imageColorSpace = surface_format.colorSpace; - swapchain_create_info.imageExtent = extent; + swapchain_create_info.flags = info.create_flags; + swapchain_create_info.surface = info.surface; + swapchain_create_info.minImageCount = image_count; + swapchain_create_info.imageFormat = surface_format.format; + swapchain_create_info.imageColorSpace = surface_format.colorSpace; + swapchain_create_info.imageExtent = extent; swapchain_create_info.imageArrayLayers = image_array_layers; - swapchain_create_info.imageUsage = info.image_usage_flags; + swapchain_create_info.imageUsage = info.image_usage_flags; - if (info.graphics_queue_index != info.present_queue_index) { - swapchain_create_info.imageSharingMode = VK_SHARING_MODE_CONCURRENT; + if (info.graphics_queue_index != info.present_queue_index) + { + swapchain_create_info.imageSharingMode = VK_SHARING_MODE_CONCURRENT; swapchain_create_info.queueFamilyIndexCount = 2; - swapchain_create_info.pQueueFamilyIndices = queue_family_indices; - } else { + swapchain_create_info.pQueueFamilyIndices = queue_family_indices; + } + else + { swapchain_create_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE; } - swapchain_create_info.preTransform = pre_transform; + swapchain_create_info.preTransform = pre_transform; swapchain_create_info.compositeAlpha = info.composite_alpha; - swapchain_create_info.presentMode = present_mode; - swapchain_create_info.clipped = info.clipped; - swapchain_create_info.oldSwapchain = info.old_swapchain; - Swapchain swapchain{}; + swapchain_create_info.presentMode = present_mode; + swapchain_create_info.clipped = info.clipped; + swapchain_create_info.oldSwapchain = info.old_swapchain; + Swapchain swapchain{}; PFN_vkCreateSwapchainKHR swapchain_create_proc; detail::vulkan_functions().get_device_proc_addr(info.device, swapchain_create_proc, "vkCreateSwapchainKHR"); - auto res = swapchain_create_proc(info.device, &swapchain_create_info, info.allocation_callbacks, &swapchain.swapchain); + auto res = + swapchain_create_proc(info.device, &swapchain_create_info, info.allocation_callbacks, &swapchain.swapchain); - if (res != VK_SUCCESS) { + if (res 
!= VK_SUCCESS) + { throw to_exception(SwapchainError::failed_create_swapchain, res); } - swapchain.device = info.device; - swapchain.image_format = surface_format.format; - swapchain.color_space = surface_format.colorSpace; + swapchain.device = info.device; + swapchain.image_format = surface_format.format; + swapchain.color_space = surface_format.colorSpace; swapchain.image_usage_flags = info.image_usage_flags; - swapchain.extent = extent; + swapchain.extent = extent; detail::vulkan_functions().get_device_proc_addr( info.device, swapchain.internal_table.fp_vkGetSwapchainImagesKHR, "vkGetSwapchainImagesKHR"); - detail::vulkan_functions().get_device_proc_addr(info.device, swapchain.internal_table.fp_vkCreateImageView, "vkCreateImageView"); - detail::vulkan_functions().get_device_proc_addr(info.device, swapchain.internal_table.fp_vkDestroyImageView, "vkDestroyImageView"); + detail::vulkan_functions().get_device_proc_addr( + info.device, swapchain.internal_table.fp_vkCreateImageView, "vkCreateImageView"); + detail::vulkan_functions().get_device_proc_addr( + info.device, swapchain.internal_table.fp_vkDestroyImageView, "vkDestroyImageView"); detail::vulkan_functions().get_device_proc_addr( info.device, swapchain.internal_table.fp_vkDestroySwapchainKHR, "vkDestroySwapchainKHR"); - auto images = swapchain.get_images(); + auto images = swapchain.get_images(); swapchain.requested_min_image_count = image_count; - swapchain.present_mode = present_mode; - swapchain.image_count = static_cast(images.size()); - swapchain.instance_version = info.instance_version; - swapchain.allocation_callbacks = info.allocation_callbacks; + swapchain.present_mode = present_mode; + swapchain.image_count = static_cast(images.size()); + swapchain.instance_version = info.instance_version; + swapchain.allocation_callbacks = info.allocation_callbacks; return swapchain; } -std::vector Swapchain::get_images() { +std::vector Swapchain::get_images() +{ std::vector swapchain_images; auto swapchain_images_ret = detail::get_vector(swapchain_images, internal_table.fp_vkGetSwapchainImagesKHR, device, swapchain); - if (swapchain_images_ret != VK_SUCCESS) { + if (swapchain_images_ret != VK_SUCCESS) + { throw to_exception(SwapchainError::failed_get_swapchain_images, swapchain_images_ret); } return swapchain_images; } -std::vector Swapchain::get_image_views() { return get_image_views(nullptr); } -std::vector Swapchain::get_image_views(const void* pNext) { +std::vector Swapchain::get_image_views() +{ + return get_image_views(nullptr); +} +std::vector Swapchain::get_image_views(const void* pNext) +{ const auto swapchain_images = get_images(); bool already_contains_image_view_usage = false; - while (pNext) { - if (reinterpret_cast(pNext)->sType == VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO) { + while (pNext) + { + if (reinterpret_cast(pNext)->sType == VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO) + { already_contains_image_view_usage = true; break; } @@ -2070,180 +2527,219 @@ std::vector Swapchain::get_image_views(const void* pNext) { desired_flags.usage = image_usage_flags; std::vector views(swapchain_images.size()); - for (size_t i = 0; i < swapchain_images.size(); i++) { + for (size_t i = 0; i < swapchain_images.size(); i++) + { VkImageViewCreateInfo createInfo = {}; - createInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; - if (instance_version >= VKB_VK_API_VERSION_1_1 && !already_contains_image_view_usage) { + createInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; + if (instance_version >= VKB_VK_API_VERSION_1_1 && 
!already_contains_image_view_usage) + { createInfo.pNext = &desired_flags; - } else { + } + else + { createInfo.pNext = pNext; } - createInfo.image = swapchain_images[i]; - createInfo.viewType = VK_IMAGE_VIEW_TYPE_2D; - createInfo.format = image_format; - createInfo.components.r = VK_COMPONENT_SWIZZLE_IDENTITY; - createInfo.components.g = VK_COMPONENT_SWIZZLE_IDENTITY; - createInfo.components.b = VK_COMPONENT_SWIZZLE_IDENTITY; - createInfo.components.a = VK_COMPONENT_SWIZZLE_IDENTITY; - createInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; - createInfo.subresourceRange.baseMipLevel = 0; - createInfo.subresourceRange.levelCount = 1; + createInfo.image = swapchain_images[i]; + createInfo.viewType = VK_IMAGE_VIEW_TYPE_2D; + createInfo.format = image_format; + createInfo.components.r = VK_COMPONENT_SWIZZLE_IDENTITY; + createInfo.components.g = VK_COMPONENT_SWIZZLE_IDENTITY; + createInfo.components.b = VK_COMPONENT_SWIZZLE_IDENTITY; + createInfo.components.a = VK_COMPONENT_SWIZZLE_IDENTITY; + createInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + createInfo.subresourceRange.baseMipLevel = 0; + createInfo.subresourceRange.levelCount = 1; createInfo.subresourceRange.baseArrayLayer = 0; - createInfo.subresourceRange.layerCount = 1; + createInfo.subresourceRange.layerCount = 1; VkResult res = internal_table.fp_vkCreateImageView(device, &createInfo, allocation_callbacks, &views[i]); - if (res != VK_SUCCESS) throw to_exception(SwapchainError::failed_create_swapchain_image_views, res); + if (res != VK_SUCCESS) + throw to_exception(SwapchainError::failed_create_swapchain_image_views, res); } return views; } -void Swapchain::destroy_image_views(std::vector const& image_views) { - for (auto& image_view : image_views) { +void Swapchain::destroy_image_views(std::vector const& image_views) +{ + for (auto& image_view : image_views) + { internal_table.fp_vkDestroyImageView(device, image_view, allocation_callbacks); } } -Swapchain::operator VkSwapchainKHR() const { return this->swapchain; } -SwapchainBuilder& SwapchainBuilder::set_old_swapchain(VkSwapchainKHR old_swapchain) { +Swapchain::operator VkSwapchainKHR() const +{ + return this->swapchain; +} +SwapchainBuilder& SwapchainBuilder::set_old_swapchain(VkSwapchainKHR old_swapchain) +{ info.old_swapchain = old_swapchain; return *this; } -SwapchainBuilder& SwapchainBuilder::set_old_swapchain(Swapchain const& swapchain) { +SwapchainBuilder& SwapchainBuilder::set_old_swapchain(Swapchain const& swapchain) +{ info.old_swapchain = swapchain.swapchain; return *this; } -SwapchainBuilder& SwapchainBuilder::set_desired_extent(uint32_t width, uint32_t height) { - info.desired_width = width; +SwapchainBuilder& SwapchainBuilder::set_desired_extent(uint32_t width, uint32_t height) +{ + info.desired_width = width; info.desired_height = height; return *this; } -SwapchainBuilder& SwapchainBuilder::set_desired_format(VkSurfaceFormatKHR format) { +SwapchainBuilder& SwapchainBuilder::set_desired_format(VkSurfaceFormatKHR format) +{ info.desired_formats.insert(info.desired_formats.begin(), format); return *this; } -SwapchainBuilder& SwapchainBuilder::add_fallback_format(VkSurfaceFormatKHR format) { +SwapchainBuilder& SwapchainBuilder::add_fallback_format(VkSurfaceFormatKHR format) +{ info.desired_formats.push_back(format); return *this; } -SwapchainBuilder& SwapchainBuilder::use_default_format_selection() { +SwapchainBuilder& SwapchainBuilder::use_default_format_selection() +{ info.desired_formats.clear(); add_desired_formats(info.desired_formats); 
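// A typical builder invocation (sketch; `device`, `width`, and `height` are assumed to come from
// the caller) relies on these defaults whenever no explicit format or present mode is requested:
//
//     Swapchain swapchain = SwapchainBuilder(device)
//                               .set_desired_extent(width, height)
//                               .build();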
return *this; } -SwapchainBuilder& SwapchainBuilder::set_desired_present_mode(VkPresentModeKHR present_mode) { +SwapchainBuilder& SwapchainBuilder::set_desired_present_mode(VkPresentModeKHR present_mode) +{ info.desired_present_modes.insert(info.desired_present_modes.begin(), present_mode); return *this; } -SwapchainBuilder& SwapchainBuilder::add_fallback_present_mode(VkPresentModeKHR present_mode) { +SwapchainBuilder& SwapchainBuilder::add_fallback_present_mode(VkPresentModeKHR present_mode) +{ info.desired_present_modes.push_back(present_mode); return *this; } -SwapchainBuilder& SwapchainBuilder::use_default_present_mode_selection() { +SwapchainBuilder& SwapchainBuilder::use_default_present_mode_selection() +{ info.desired_present_modes.clear(); add_desired_present_modes(info.desired_present_modes); return *this; } -SwapchainBuilder& SwapchainBuilder::set_allocation_callbacks(VkAllocationCallbacks* callbacks) { +SwapchainBuilder& SwapchainBuilder::set_allocation_callbacks(VkAllocationCallbacks* callbacks) +{ info.allocation_callbacks = callbacks; return *this; } -SwapchainBuilder& SwapchainBuilder::set_image_usage_flags(VkImageUsageFlags usage_flags) { +SwapchainBuilder& SwapchainBuilder::set_image_usage_flags(VkImageUsageFlags usage_flags) +{ info.image_usage_flags = usage_flags; return *this; } -SwapchainBuilder& SwapchainBuilder::add_image_usage_flags(VkImageUsageFlags usage_flags) { +SwapchainBuilder& SwapchainBuilder::add_image_usage_flags(VkImageUsageFlags usage_flags) +{ info.image_usage_flags = info.image_usage_flags | usage_flags; return *this; } -SwapchainBuilder& SwapchainBuilder::use_default_image_usage_flags() { +SwapchainBuilder& SwapchainBuilder::use_default_image_usage_flags() +{ info.image_usage_flags = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; return *this; } -SwapchainBuilder& SwapchainBuilder::set_image_array_layer_count(uint32_t array_layer_count) { +SwapchainBuilder& SwapchainBuilder::set_image_array_layer_count(uint32_t array_layer_count) +{ info.array_layer_count = array_layer_count; return *this; } -SwapchainBuilder& SwapchainBuilder::set_desired_min_image_count(uint32_t min_image_count) { +SwapchainBuilder& SwapchainBuilder::set_desired_min_image_count(uint32_t min_image_count) +{ info.min_image_count = min_image_count; return *this; } -SwapchainBuilder& SwapchainBuilder::set_required_min_image_count(uint32_t required_min_image_count) { +SwapchainBuilder& SwapchainBuilder::set_required_min_image_count(uint32_t required_min_image_count) +{ info.required_min_image_count = required_min_image_count; return *this; } -SwapchainBuilder& SwapchainBuilder::set_clipped(bool clipped) { +SwapchainBuilder& SwapchainBuilder::set_clipped(bool clipped) +{ info.clipped = clipped; return *this; } -SwapchainBuilder& SwapchainBuilder::set_create_flags(VkSwapchainCreateFlagBitsKHR create_flags) { +SwapchainBuilder& SwapchainBuilder::set_create_flags(VkSwapchainCreateFlagBitsKHR create_flags) +{ info.create_flags = create_flags; return *this; } -SwapchainBuilder& SwapchainBuilder::set_pre_transform_flags(VkSurfaceTransformFlagBitsKHR pre_transform_flags) { +SwapchainBuilder& SwapchainBuilder::set_pre_transform_flags(VkSurfaceTransformFlagBitsKHR pre_transform_flags) +{ info.pre_transform = pre_transform_flags; return *this; } -SwapchainBuilder& SwapchainBuilder::set_composite_alpha_flags(VkCompositeAlphaFlagBitsKHR composite_alpha_flags) { +SwapchainBuilder& SwapchainBuilder::set_composite_alpha_flags(VkCompositeAlphaFlagBitsKHR composite_alpha_flags) +{ info.composite_alpha = 
composite_alpha_flags; return *this; } -void SwapchainBuilder::add_desired_formats(std::vector& formats) const { +void SwapchainBuilder::add_desired_formats(std::vector& formats) const +{ formats.push_back({ VK_FORMAT_B8G8R8A8_SRGB, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR }); formats.push_back({ VK_FORMAT_R8G8B8A8_SRGB, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR }); } -void SwapchainBuilder::add_desired_present_modes(std::vector& modes) const { +void SwapchainBuilder::add_desired_present_modes(std::vector& modes) const +{ modes.push_back(VK_PRESENT_MODE_MAILBOX_KHR); modes.push_back(VK_PRESENT_MODE_FIFO_KHR); } -SDL_Window* create_window_sdl(const char* window_name, bool resizable, int width, int height) { - if (!SDL_Init(SDL_INIT_VIDEO)) throw sdl_exception(); +SDL_Window* create_window_sdl(const char* window_name, bool resizable, int width, int height) +{ + if (!SDL_Init(SDL_INIT_VIDEO)) + throw sdl_exception(); SDL_WindowFlags flags = 0; flags |= SDL_WINDOW_VULKAN; - if (resizable) flags |= SDL_WINDOW_RESIZABLE; + if (resizable) + flags |= SDL_WINDOW_RESIZABLE; auto window = SDL_CreateWindow(window_name, width, height, flags); - if (window == nullptr) throw sdl_exception(); + if (window == nullptr) + throw sdl_exception(); return window; } -void destroy_window_sdl(SDL_Window* window) { +void destroy_window_sdl(SDL_Window* window) +{ SDL_DestroyWindow(window); SDL_Quit(); } -VkSurfaceKHR create_surface_sdl(VkInstance instance, SDL_Window * window, VkAllocationCallbacks* allocator) { +VkSurfaceKHR create_surface_sdl(VkInstance instance, SDL_Window* window, VkAllocationCallbacks* allocator) +{ VkSurfaceKHR surface = VK_NULL_HANDLE; - if (!SDL_Vulkan_CreateSurface(window, instance, allocator, &surface)) { + if (!SDL_Vulkan_CreateSurface(window, instance, allocator, &surface)) + { surface = VK_NULL_HANDLE; throw sdl_exception(); } return surface; } -void create_swapchain(SwapchainBuilder& swapchain_builder, Swapchain& swapchain) { +void create_swapchain(SwapchainBuilder& swapchain_builder, Swapchain& swapchain) +{ auto new_swapchain = swapchain_builder.set_old_swapchain(swapchain).build(); destroy_swapchain(swapchain); swapchain = new_swapchain; } -VkCommandPool create_command_pool( - vkb::DispatchTable const& disp, - uint32_t queue_family_index -) { +VkCommandPool create_command_pool(vkb::DispatchTable const& disp, uint32_t queue_family_index) +{ VkCommandPoolCreateInfo pool_info = {}; - pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO; - pool_info.queueFamilyIndex = queue_family_index; + pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO; + pool_info.queueFamilyIndex = queue_family_index; VkCommandPool command_pool; - auto result = disp.createCommandPool(&pool_info, nullptr, &command_pool); + auto result = disp.createCommandPool(&pool_info, nullptr, &command_pool); VERIFY_VK_RESULT("failed to create command pool", result); return command_pool; } -Sync create_sync_objects(Swapchain const& swapchain, vkb::DispatchTable const& disp, const int max_frames_in_flight) { +Sync create_sync_objects(Swapchain const& swapchain, vkb::DispatchTable const& disp, const int max_frames_in_flight) +{ Sync sync; sync.available_semaphores.resize(max_frames_in_flight); @@ -2252,13 +2748,14 @@ Sync create_sync_objects(Swapchain const& swapchain, vkb::DispatchTable const& d sync.image_in_flight.resize(swapchain.image_count, VK_NULL_HANDLE); VkSemaphoreCreateInfo semaphore_info = {}; - semaphore_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO; + semaphore_info.sType = 
VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO; VkFenceCreateInfo fence_info = {}; - fence_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO; - fence_info.flags = VK_FENCE_CREATE_SIGNALED_BIT; + fence_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO; + fence_info.flags = VK_FENCE_CREATE_SIGNALED_BIT; - for (size_t i = 0; i < max_frames_in_flight; i++) { + for (size_t i = 0; i < max_frames_in_flight; i++) + { VkResult result; result = disp.createSemaphore(&semaphore_info, nullptr, &sync.available_semaphores[i]); VERIFY_VK_RESULT("failed to create available semaphore", result); @@ -2270,14 +2767,16 @@ Sync create_sync_objects(Swapchain const& swapchain, vkb::DispatchTable const& d return sync; } -std::vector readFile(const std::string& filename) { +std::vector readFile(const std::string& filename) +{ std::ifstream file(filename, std::ios::ate | std::ios::binary); - if (!file.is_open()) { + if (!file.is_open()) + { throw std::runtime_error("failed to open file!"); } - size_t file_size = (size_t)file.tellg(); + size_t file_size = (size_t)file.tellg(); std::vector buffer(file_size); file.seekg(0); @@ -2288,25 +2787,28 @@ std::vector readFile(const std::string& filename) { return buffer; } -VkShaderModule createShaderModule(vkb::DispatchTable const& disp, const std::vector& code) { +VkShaderModule createShaderModule(vkb::DispatchTable const& disp, const std::vector& code) +{ VkShaderModuleCreateInfo create_info = {}; - create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; - create_info.codeSize = code.size(); - create_info.pCode = reinterpret_cast(code.data()); + create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; + create_info.codeSize = code.size(); + create_info.pCode = reinterpret_cast(code.data()); VkShaderModule shaderModule; - auto result = disp.createShaderModule(&create_info, nullptr, &shaderModule); + auto result = disp.createShaderModule(&create_info, nullptr, &shaderModule); VERIFY_VK_RESULT("failed to create shader module", result); return shaderModule; } -VkShaderModule readShaderFromFile(vkb::DispatchTable const& disp, const std::string& filename) { +VkShaderModule readShaderFromFile(vkb::DispatchTable const& disp, const std::string& filename) +{ std::vector code = readFile(filename); return createShaderModule(disp, code); } -std::exception vulkan_exception(const char* message, VkResult result) { +std::exception vulkan_exception(const char* message, VkResult result) +{ std::string error_message; error_message.append(message); error_message.append(": "); @@ -2314,7 +2816,8 @@ std::exception vulkan_exception(const char* message, VkResult result) { return std::runtime_error(error_message); } -std::exception sdl_exception() { +std::exception sdl_exception() +{ return std::runtime_error(SDL_GetError()); } @@ -2348,11 +2851,12 @@ void device_initialization_phase_5(SwapchainBuilder& swapchain_builder, Init& in { create_swapchain(swapchain_builder, init.swapchain); - init.swapchain_images = init.swapchain.get_images(); + init.swapchain_images = init.swapchain.get_images(); init.swapchain_image_views = init.swapchain.get_image_views(); } -Init device_initialization(const std::string& window_name) { +Init device_initialization(const std::string& window_name) +{ Init init; device_initialization_phase_1(window_name, init); @@ -2372,7 +2876,8 @@ Init device_initialization(const std::string& window_name) { return init; } -void cleanup_init(Init& init) { +void cleanup_init(Init& init) +{ init.swapchain.destroy_image_views(init.swapchain_image_views); 
destroy_swapchain(init.swapchain); @@ -2382,14 +2887,16 @@ void cleanup_init(Init& init) { destroy_window_sdl(init.window); } -void recreate_init_swapchain(SwapchainBuilder& swapchain_builder, Init& init, bool wait_for_idle) { - if (wait_for_idle) init.disp.deviceWaitIdle(); +void recreate_init_swapchain(SwapchainBuilder& swapchain_builder, Init& init, bool wait_for_idle) +{ + if (wait_for_idle) + init.disp.deviceWaitIdle(); init.swapchain.destroy_image_views(init.swapchain_image_views); create_swapchain(swapchain_builder, init.swapchain); - init.swapchain_images = init.swapchain.get_images(); + init.swapchain_images = init.swapchain.get_images(); init.swapchain_image_views = init.swapchain.get_image_views(); } @@ -2415,12 +2922,15 @@ void TestAppBase::run(const std::string& window_name) this->setup(); - bool running = true; - int frame_num = 0; - while (running) { + bool running = true; + int frame_num = 0; + while (running) + { SDL_Event windowEvent; - while (SDL_PollEvent(&windowEvent)) { - if (windowEvent.type == SDL_EVENT_QUIT) { + while (SDL_PollEvent(&windowEvent)) + { + if (windowEvent.type == SDL_EVENT_QUIT) + { break; } } @@ -2445,7 +2955,8 @@ void TestAppBase::recreate_swapchain(bool wait_for_idle) void TestAppBase::setup() {} void TestAppBase::cleanup() {} -void TestAppBase::configure_instance_builder(InstanceBuilder& instance_builder) { +void TestAppBase::configure_instance_builder(InstanceBuilder& instance_builder) +{ instance_builder.use_default_debug_messenger().request_validation_layers(); } void TestAppBase::configure_physical_device_selector(PhysicalDeviceSelector& phys_device_selector) {} diff --git a/test/test_apps/common/test_app_base.h b/test/test_apps/common/test_app_base.h index 02048420f3..4d134dbece 100644 --- a/test/test_apps/common/test_app_base.h +++ b/test/test_apps/common/test_app_base.h @@ -71,13 +71,16 @@ std::exception vulkan_exception(const char* message, VkResult result); std::exception sdl_exception(); GFXRECON_BEGIN_NAMESPACE(detail) -struct GenericFeaturesPNextNode { +struct GenericFeaturesPNextNode +{ static const uint32_t field_capacity = 256; GenericFeaturesPNextNode(); - template GenericFeaturesPNextNode(T const& features) noexcept { + template + GenericFeaturesPNextNode(T const& features) noexcept + { memset(fields, UINT8_MAX, sizeof(VkBool32) * field_capacity); memcpy(this, &features, sizeof(T)); } @@ -87,17 +90,22 @@ struct GenericFeaturesPNextNode { void combine(GenericFeaturesPNextNode const& right) noexcept; VkStructureType sType = static_cast(0); - void* pNext = nullptr; - VkBool32 fields[field_capacity]; + void* pNext = nullptr; + VkBool32 fields[field_capacity]; }; -struct GenericFeatureChain { +struct GenericFeatureChain +{ std::vector nodes; - template void add(T const& features) noexcept { + template + void add(T const& features) noexcept + { // If this struct is already in the list, combine it - for (auto& node : nodes) { - if (static_cast(features.sType) == node.sType) { + for (auto& node : nodes) + { + if (static_cast(features.sType) == node.sType) + { node.combine(features); return; } @@ -116,7 +124,8 @@ struct GenericFeatureChain { GFXRECON_END_NAMESPACE(detail) -enum class InstanceError { +enum class InstanceError +{ vulkan_unavailable, vulkan_version_unavailable, vulkan_version_1_1_unavailable, @@ -127,13 +136,15 @@ enum class InstanceError { requested_extensions_not_present, windowing_extensions_not_present, }; -enum class PhysicalDeviceError { +enum class PhysicalDeviceError +{ no_surface_provided, 
failed_enumerate_physical_devices, no_physical_devices_found, no_suitable_device, }; -enum class QueueError { +enum class QueueError +{ present_unavailable, graphics_unavailable, compute_unavailable, @@ -141,11 +152,13 @@ enum class QueueError { queue_index_out_of_range, invalid_queue_family_index }; -enum class DeviceError { +enum class DeviceError +{ failed_create_device, VkPhysicalDeviceFeatures2_in_pNext_chain_while_using_add_required_extension_features, }; -enum class SwapchainError { +enum class SwapchainError +{ surface_handle_not_provided, failed_query_surface_support_details, failed_create_swapchain, @@ -179,7 +192,8 @@ std::exception to_exception(SwapchainError err, VkResult result); // Gathers useful information about the available vulkan capabilities, like layers and instance // extensions. Use this for enabling features conditionally, ie if you would like an extension but // can use a fallback if it isn't supported but need to know if support is available first. -struct SystemInfo { +struct SystemInfo +{ private: SystemInfo(); @@ -193,10 +207,10 @@ struct SystemInfo { // Returns true if an extension is available bool is_extension_available(const char* extension_name) const; - std::vector available_layers; + std::vector available_layers; std::vector available_extensions; - bool validation_layers_available = false; - bool debug_utils_available = false; + bool validation_layers_available = false; + bool debug_utils_available = false; }; // Forward declared - check VkBoostrap.cpp for implementations @@ -204,11 +218,13 @@ const char* to_string_message_severity(VkDebugUtilsMessageSeverityFlagBitsEXT s) const char* to_string_message_type(VkDebugUtilsMessageTypeFlagsEXT s); // Default debug messenger -// Feel free to copy-paste it into your own code, change it as needed, then call `set_debug_callback()` to use that instead +// Feel free to copy-paste it into your own code, change it as needed, then call `set_debug_callback()` to use that +// instead inline VKAPI_ATTR VkBool32 VKAPI_CALL default_debug_callback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, - VkDebugUtilsMessageTypeFlagsEXT messageType, + VkDebugUtilsMessageTypeFlagsEXT messageType, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, - void*) { + void*) +{ auto ms = to_string_message_severity(messageSeverity); auto mt = to_string_message_type(messageType); printf("[%s: %s]\n%s\n", ms, mt, pCallbackData->pMessage); @@ -219,12 +235,13 @@ inline VKAPI_ATTR VkBool32 VKAPI_CALL default_debug_callback(VkDebugUtilsMessage class InstanceBuilder; class PhysicalDeviceSelector; -struct Instance { - VkInstance instance = VK_NULL_HANDLE; - VkDebugUtilsMessengerEXT debug_messenger = VK_NULL_HANDLE; - VkAllocationCallbacks* allocation_callbacks = VK_NULL_HANDLE; +struct Instance +{ + VkInstance instance = VK_NULL_HANDLE; + VkDebugUtilsMessengerEXT debug_messenger = VK_NULL_HANDLE; + VkAllocationCallbacks* allocation_callbacks = VK_NULL_HANDLE; PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr = nullptr; - PFN_vkGetDeviceProcAddr fp_vkGetDeviceProcAddr = nullptr; + PFN_vkGetDeviceProcAddr fp_vkGetDeviceProcAddr = nullptr; // A conversion function which allows this Instance to be used // in places where VkInstance would have been used. 
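The default_debug_callback above is intended to be replaced when an application wants its own filtering; the header's comment suggests copying it, adjusting it, and handing the copy to set_debug_callback(). A minimal sketch of such a replacement follows; it is illustrative only and not part of the patch, and it assumes the snippet sits outside the gfxrecon::test namespace with this header and <cstdio> included. The name my_debug_callback and the warning-only filter are assumptions.

static VKAPI_ATTR VkBool32 VKAPI_CALL my_debug_callback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
                                                        VkDebugUtilsMessageTypeFlagsEXT        messageType,
                                                        const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData,
                                                        void* /*pUserData*/)
{
    // Forward only warnings and errors to stderr; quieter messages are dropped.
    if (messageSeverity >= VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT)
    {
        fprintf(stderr,
                "[%s: %s]\n%s\n",
                gfxrecon::test::to_string_message_severity(messageSeverity),
                gfxrecon::test::to_string_message_type(messageType),
                pCallbackData->pMessage);
    }
    // Returning VK_FALSE tells the validation layers not to abort the triggering call.
    return VK_FALSE;
}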
@@ -234,18 +251,20 @@ struct Instance { vkb::InstanceDispatchTable make_table() const; private: - bool headless = false; - bool properties2_ext_enabled = false; - uint32_t instance_version = VKB_VK_API_VERSION_1_0; - uint32_t api_version = VKB_VK_API_VERSION_1_0; + bool headless = false; + bool properties2_ext_enabled = false; + uint32_t instance_version = VKB_VK_API_VERSION_1_0; + uint32_t api_version = VKB_VK_API_VERSION_1_0; friend class InstanceBuilder; friend class PhysicalDeviceSelector; }; void destroy_surface(Instance const& instance, VkSurfaceKHR surface); // release surface handle -void destroy_surface(VkInstance instance, VkSurfaceKHR surface, VkAllocationCallbacks* callbacks = nullptr); // release surface handle -void destroy_instance(Instance const& instance); // release instance resources +void destroy_surface(VkInstance instance, + VkSurfaceKHR surface, + VkAllocationCallbacks* callbacks = nullptr); // release surface handle +void destroy_instance(Instance const& instance); // release instance resources /* If headless mode is false, by default vk-bootstrap use the following logic to enable the windowing extensions @@ -267,7 +286,8 @@ Use `InstanceBuilder::enable_extension()` to add new extensions without altering Feel free to make a PR or raise an issue to include additional platforms. */ -class InstanceBuilder { +class InstanceBuilder +{ public: // Default constructor, will load vulkan. explicit InstanceBuilder(); @@ -311,7 +331,8 @@ class InstanceBuilder { [[deprecated("Use require_api_version + set_minimum_instance_version instead.")]] InstanceBuilder& desire_api_version(uint32_t preferred_vulkan_version); - // Prefer a vulkan instance API version. If the desired version isn't available, it will use the highest version available. + // Prefer a vulkan instance API version. If the desired version isn't available, it will use the highest version + // available. 
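The Instance struct and InstanceBuilder declared above follow the usual builder flow. A minimal usage sketch, not part of the patch, assuming the builder's build() method (elided from this hunk) returns a gfxrecon::test::Instance and throws on failure, consistent with the exception-based error handling used elsewhere in this patch; version requirements such as require_api_version are omitted here.

// Illustrative only: create an instance, grab its dispatch table, and destroy it again.
void instance_builder_example()
{
    gfxrecon::test::InstanceBuilder instance_builder;
    auto instance = instance_builder.use_default_debug_messenger().request_validation_layers().build();

    vkb::InstanceDispatchTable inst_disp = instance.make_table();
    (void)inst_disp; // instance-level queries would go through this table

    gfxrecon::test::destroy_instance(instance);
}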
[[deprecated("Use require_api_version + set_minimum_instance_version instead.")]] InstanceBuilder& desire_api_version(uint32_t major, uint32_t minor, uint32_t patch = 0); @@ -361,25 +382,26 @@ class InstanceBuilder { InstanceBuilder& set_allocation_callbacks(VkAllocationCallbacks* callbacks); private: - struct InstanceInfo { + struct InstanceInfo + { // VkApplicationInfo - const char* app_name = nullptr; - const char* engine_name = nullptr; - uint32_t application_version = 0; - uint32_t engine_version = 0; - uint32_t minimum_instance_version = 0; - uint32_t required_api_version = VKB_VK_API_VERSION_1_0; - uint32_t desired_api_version = VKB_VK_API_VERSION_1_0; + const char* app_name = nullptr; + const char* engine_name = nullptr; + uint32_t application_version = 0; + uint32_t engine_version = 0; + uint32_t minimum_instance_version = 0; + uint32_t required_api_version = VKB_VK_API_VERSION_1_0; + uint32_t desired_api_version = VKB_VK_API_VERSION_1_0; // VkInstanceCreateInfo - std::vector layers; - std::vector extensions; - VkInstanceCreateFlags flags = static_cast(0); + std::vector layers; + std::vector extensions; + VkInstanceCreateFlags flags = static_cast(0); std::vector pNext_elements; // debug callback - use the default so it is not nullptr PFN_vkDebugUtilsMessengerCallbackEXT debug_callback = default_debug_callback; - VkDebugUtilsMessageSeverityFlagsEXT debug_message_severity = + VkDebugUtilsMessageSeverityFlagsEXT debug_message_severity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT; VkDebugUtilsMessageTypeFlagsEXT debug_message_type = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | @@ -387,42 +409,45 @@ class InstanceBuilder { void* debug_user_data_pointer = nullptr; // validation features - std::vector disabled_validation_checks; - std::vector enabled_validation_features; + std::vector disabled_validation_checks; + std::vector enabled_validation_features; std::vector disabled_validation_features; // Custom allocator VkAllocationCallbacks* allocation_callbacks = VK_NULL_HANDLE; bool request_validation_layers = false; - bool enable_validation_layers = false; - bool use_debug_messenger = false; - bool headless_context = false; + bool enable_validation_layers = false; + bool use_debug_messenger = false; + bool headless_context = false; PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr = nullptr; } info; }; -VKAPI_ATTR VkBool32 VKAPI_CALL default_debug_callback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, - VkDebugUtilsMessageTypeFlagsEXT messageType, +VKAPI_ATTR VkBool32 VKAPI_CALL default_debug_callback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageType, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, - void* pUserData); + void* pUserData); -void destroy_debug_utils_messenger( - VkInstance const instance, VkDebugUtilsMessengerEXT const messenger, VkAllocationCallbacks* allocation_callbacks = nullptr); +void destroy_debug_utils_messenger(VkInstance const instance, + VkDebugUtilsMessengerEXT const messenger, + VkAllocationCallbacks* allocation_callbacks = nullptr); // ---- Physical Device ---- // class PhysicalDeviceSelector; class DeviceBuilder; -struct PhysicalDevice { - std::string name; +struct PhysicalDevice +{ + std::string name; VkPhysicalDevice physical_device = VK_NULL_HANDLE; - VkSurfaceKHR surface = VK_NULL_HANDLE; + VkSurfaceKHR surface = VK_NULL_HANDLE; - // Note that this reflects selected features carried 
over from required features, not all features the physical device supports. - VkPhysicalDeviceFeatures features{}; - VkPhysicalDeviceProperties properties{}; + // Note that this reflects selected features carried over from required features, not all features the physical + // device supports. + VkPhysicalDeviceFeatures features{}; + VkPhysicalDeviceProperties properties{}; VkPhysicalDeviceMemoryProperties memory_properties{}; // Has a queue family that supports compute operations but not graphics nor transfer. @@ -448,7 +473,9 @@ struct PhysicalDevice { bool is_extension_present(const char* extension) const; // Returns true if all the features are present - template bool are_extension_features_present(T const& features) const { + template + bool are_extension_features_present(T const& features) const + { return is_features_node_present(detail::GenericFeaturesPNextNode(features)); } @@ -466,7 +493,9 @@ struct PhysicalDevice { // If the features from the provided features struct are all present, make all of the features be enable on the // device. Returns true if all of the features are present. - template bool enable_extension_features_if_present(T const& features_check) { + template + bool enable_extension_features_if_present(T const& features_check) + { return enable_features_node_if_present(detail::GenericFeaturesPNextNode(features_check)); } @@ -475,15 +504,20 @@ struct PhysicalDevice { operator VkPhysicalDevice() const; private: - uint32_t instance_version = VKB_VK_API_VERSION_1_0; - std::vector extensions_to_enable; - std::vector available_extensions; + uint32_t instance_version = VKB_VK_API_VERSION_1_0; + std::vector extensions_to_enable; + std::vector available_extensions; std::vector queue_families; - detail::GenericFeatureChain extended_features_chain; + detail::GenericFeatureChain extended_features_chain; bool defer_surface_initialization = false; - bool properties2_ext_enabled = false; - enum class Suitable { yes, partial, no }; + bool properties2_ext_enabled = false; + enum class Suitable + { + yes, + partial, + no + }; Suitable suitable = Suitable::yes; friend class PhysicalDeviceSelector; friend class DeviceBuilder; @@ -492,36 +526,48 @@ struct PhysicalDevice { bool enable_features_node_if_present(detail::GenericFeaturesPNextNode const& node); }; -enum class PreferredDeviceType { other = 0, integrated = 1, discrete = 2, virtual_gpu = 3, cpu = 4 }; +enum class PreferredDeviceType +{ + other = 0, + integrated = 1, + discrete = 2, + virtual_gpu = 3, + cpu = 4 +}; -enum class DeviceSelectionMode { +enum class DeviceSelectionMode +{ // return all suitable and partially suitable devices partially_and_fully_suitable, // return only physical devices which are fully suitable only_fully_suitable }; -// Enumerates the physical devices on the system, and based on the added criteria, returns a physical device or list of physical devies -// A device is considered suitable if it meets all the 'required' and 'desired' criteria. -// A device is considered partially suitable if it meets only the 'required' criteria. -class PhysicalDeviceSelector { +// Enumerates the physical devices on the system, and based on the added criteria, returns a physical device or list of +// physical devies A device is considered suitable if it meets all the 'required' and 'desired' criteria. A device is +// considered partially suitable if it meets only the 'required' criteria. +class PhysicalDeviceSelector +{ public: // Requires a gfxrecon::test::Instance to construct, needed to pass instance creation info. 
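The templated feature helpers on PhysicalDevice shown above (are_extension_features_present, enable_extension_features_if_present) are the hooks for opting into optional capabilities before device creation. A small sketch follows; it is illustrative only, assumes this header and the Vulkan headers are included, and the timeline-semaphore extension and feature struct are example inputs rather than something the patch requires.

// Illustrative only: record an optional feature for device creation when the driver supports it.
bool enable_timeline_semaphores_if_supported(gfxrecon::test::PhysicalDevice& physical_device)
{
    if (!physical_device.is_extension_present(VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME))
    {
        return false;
    }

    VkPhysicalDeviceTimelineSemaphoreFeatures timeline_features{};
    timeline_features.sType             = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES;
    timeline_features.timelineSemaphore = VK_TRUE;

    // Returns true, and marks the feature for the eventual VkDevice, only when every
    // requested member is reported as supported by the implementation.
    return physical_device.enable_extension_features_if_present(timeline_features);
}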
explicit PhysicalDeviceSelector(Instance const& instance); - // Requires a gfxrecon::test::Instance to construct, needed to pass instance creation info, optionally specify the surface here + // Requires a gfxrecon::test::Instance to construct, needed to pass instance creation info, optionally specify the + // surface here explicit PhysicalDeviceSelector(Instance const& instance, VkSurfaceKHR surface); // Return the first device which is suitable // use the `selection` parameter to configure if partially PhysicalDevice select(DeviceSelectionMode selection = DeviceSelectionMode::partially_and_fully_suitable) const; - // Return all devices which are considered suitable - intended for applications which want to let the user pick the physical device - std::vector select_devices( - DeviceSelectionMode selection = DeviceSelectionMode::partially_and_fully_suitable) const; + // Return all devices which are considered suitable - intended for applications which want to let the user pick the + // physical device + std::vector + select_devices(DeviceSelectionMode selection = DeviceSelectionMode::partially_and_fully_suitable) const; - // Return the names of all devices which are considered suitable - intended for applications which want to let the user pick the physical device - std::vector select_device_names( - DeviceSelectionMode selection = DeviceSelectionMode::partially_and_fully_suitable) const; + // Return the names of all devices which are considered suitable - intended for applications which want to let the + // user pick the physical device + std::vector + select_device_names(DeviceSelectionMode selection = DeviceSelectionMode::partially_and_fully_suitable) const; // Set the surface in which the physical device should render to. // Be sure to set it if swapchain functionality is to be used. @@ -579,7 +625,9 @@ class PhysicalDeviceSelector { // Require a physical device which supports a specific set of general/extension features. // If this function is used, the user should not put their own VkPhysicalDeviceFeatures2 in // the pNext chain of VkDeviceCreateInfo. - template PhysicalDeviceSelector& add_required_extension_features(T const& features) { + template + PhysicalDeviceSelector& add_required_extension_features(T const& features) + { criteria.extended_features_chain.add(features); return *this; } @@ -588,7 +636,8 @@ class PhysicalDeviceSelector { PhysicalDeviceSelector& set_required_features(VkPhysicalDeviceFeatures const& features); #if defined(VKB_VK_API_VERSION_1_2) // Require a physical device which supports the features in VkPhysicalDeviceVulkan11Features. - // Must have vulkan version 1.2 - This is due to the VkPhysicalDeviceVulkan11Features struct being added in 1.2, not 1.1 + // Must have vulkan version 1.2 - This is due to the VkPhysicalDeviceVulkan11Features struct being added in 1.2, + // not 1.1 PhysicalDeviceSelector& set_required_features_11(VkPhysicalDeviceVulkan11Features const& features_11); // Require a physical device which supports the features in VkPhysicalDeviceVulkan12Features. // Must have vulkan version 1.2 @@ -605,50 +654,53 @@ class PhysicalDeviceSelector { PhysicalDeviceSelector& defer_surface_initialization(); // Ignore all criteria and choose the first physical device that is available. - // Only use when: The first gpu in the list may be set by global user preferences and an application may wish to respect it. + // Only use when: The first gpu in the list may be set by global user preferences and an application may wish to + // respect it. 
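As a usage note for the selector API above, a minimal sketch of picking a device for a surface; the Instance-plus-VkSurfaceKHR constructor and the select() overload are declared in this hunk, while the anisotropy requirement is only an example criterion and the sketch itself is not part of the patch.

// Illustrative only: select a physical device that can present to the given surface.
gfxrecon::test::PhysicalDevice pick_physical_device(gfxrecon::test::Instance const& instance, VkSurfaceKHR surface)
{
    VkPhysicalDeviceFeatures required_features{};
    required_features.samplerAnisotropy = VK_TRUE;

    gfxrecon::test::PhysicalDeviceSelector selector(instance, surface);
    return selector.set_required_features(required_features)
        .select(gfxrecon::test::DeviceSelectionMode::partially_and_fully_suitable);
}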
PhysicalDeviceSelector& select_first_device_unconditionally(bool unconditionally = true); private: - struct InstanceInfo { - VkInstance instance = VK_NULL_HANDLE; - VkSurfaceKHR surface = VK_NULL_HANDLE; - uint32_t version = VKB_VK_API_VERSION_1_0; - bool headless = false; - bool properties2_ext_enabled = false; + struct InstanceInfo + { + VkInstance instance = VK_NULL_HANDLE; + VkSurfaceKHR surface = VK_NULL_HANDLE; + uint32_t version = VKB_VK_API_VERSION_1_0; + bool headless = false; + bool properties2_ext_enabled = false; } instance_info; // We copy the extension features stored in the selector criteria under the prose of a // "template" to ensure that after fetching everything is compared 1:1 during a match. - struct SelectionCriteria { - std::string name; - PreferredDeviceType preferred_type = PreferredDeviceType::discrete; - bool allow_any_type = true; - bool require_present = true; - bool require_dedicated_transfer_queue = false; - bool require_dedicated_compute_queue = false; - bool require_separate_transfer_queue = false; - bool require_separate_compute_queue = false; - VkDeviceSize required_mem_size = 0; - VkDeviceSize desired_mem_size = 0; + struct SelectionCriteria + { + std::string name; + PreferredDeviceType preferred_type = PreferredDeviceType::discrete; + bool allow_any_type = true; + bool require_present = true; + bool require_dedicated_transfer_queue = false; + bool require_dedicated_compute_queue = false; + bool require_separate_transfer_queue = false; + bool require_separate_compute_queue = false; + VkDeviceSize required_mem_size = 0; + VkDeviceSize desired_mem_size = 0; std::vector required_extensions; std::vector desired_extensions; uint32_t required_version = VKB_VK_API_VERSION_1_0; - uint32_t desired_version = VKB_VK_API_VERSION_1_0; + uint32_t desired_version = VKB_VK_API_VERSION_1_0; - VkPhysicalDeviceFeatures required_features{}; + VkPhysicalDeviceFeatures required_features{}; VkPhysicalDeviceFeatures2 required_features2{}; detail::GenericFeatureChain extended_features_chain; - bool defer_surface_initialization = false; - bool use_first_gpu_unconditionally = false; - bool enable_portability_subset = true; + bool defer_surface_initialization = false; + bool use_first_gpu_unconditionally = false; + bool enable_portability_subset = true; } criteria; - PhysicalDevice populate_device_details( - VkPhysicalDevice phys_device, detail::GenericFeatureChain const& src_extended_features_chain) const; + PhysicalDevice populate_device_details(VkPhysicalDevice phys_device, + detail::GenericFeatureChain const& src_extended_features_chain) const; PhysicalDevice::Suitable is_device_suitable(PhysicalDevice const& phys_device) const; @@ -656,18 +708,25 @@ class PhysicalDeviceSelector { }; // ---- Queue ---- // -enum class QueueType { present, graphics, compute, transfer }; +enum class QueueType +{ + present, + graphics, + compute, + transfer +}; // ---- Device ---- // -struct Device { - VkDevice device = VK_NULL_HANDLE; - PhysicalDevice physical_device; - VkSurfaceKHR surface = VK_NULL_HANDLE; +struct Device +{ + VkDevice device = VK_NULL_HANDLE; + PhysicalDevice physical_device; + VkSurfaceKHR surface = VK_NULL_HANDLE; std::vector queue_families; - VkAllocationCallbacks* allocation_callbacks = VK_NULL_HANDLE; - PFN_vkGetDeviceProcAddr fp_vkGetDeviceProcAddr = nullptr; - uint32_t instance_version = VKB_VK_API_VERSION_1_0; + VkAllocationCallbacks* allocation_callbacks = VK_NULL_HANDLE; + PFN_vkGetDeviceProcAddr fp_vkGetDeviceProcAddr = nullptr; + uint32_t instance_version = 
VKB_VK_API_VERSION_1_0; std::optional get_queue_index(QueueType type) const; // Only a compute or transfer queue type is valid. All other queue types do not support a 'dedicated' queue index @@ -685,25 +744,27 @@ struct Device { operator VkDevice() const; private: - struct { + struct + { PFN_vkGetDeviceQueue fp_vkGetDeviceQueue = nullptr; - PFN_vkDestroyDevice fp_vkDestroyDevice = nullptr; + PFN_vkDestroyDevice fp_vkDestroyDevice = nullptr; } internal_table; friend class DeviceBuilder; friend void destroy_device(Device const& device); }; - // For advanced device queue setup -struct CustomQueueDescription { +struct CustomQueueDescription +{ explicit CustomQueueDescription(uint32_t index, std::vector priorities); - uint32_t index = 0; + uint32_t index = 0; std::vector priorities; }; void destroy_device(Device const& device); -class DeviceBuilder { +class DeviceBuilder +{ public: // Any features and extensions that are requested/required in PhysicalDeviceSelector are automatically enabled. explicit DeviceBuilder(PhysicalDevice physical_device); @@ -716,7 +777,9 @@ class DeviceBuilder { // Add a structure to the pNext chain of VkDeviceCreateInfo. // The structure must be valid when DeviceBuilder::build() is called. - template DeviceBuilder& add_pNext(T* structure) { + template + DeviceBuilder& add_pNext(T* structure) + { info.pNext_chain.push_back(reinterpret_cast(structure)); return *this; } @@ -726,27 +789,32 @@ class DeviceBuilder { private: PhysicalDevice physical_device; - struct DeviceInfo { - VkDeviceCreateFlags flags = static_cast(0); - std::vector pNext_chain; + struct DeviceInfo + { + VkDeviceCreateFlags flags = static_cast(0); + std::vector pNext_chain; std::vector queue_descriptions; - VkAllocationCallbacks* allocation_callbacks = VK_NULL_HANDLE; + VkAllocationCallbacks* allocation_callbacks = VK_NULL_HANDLE; } info; }; // ---- Swapchain ---- // -struct Swapchain { - VkDevice device = VK_NULL_HANDLE; - VkSwapchainKHR swapchain = VK_NULL_HANDLE; - uint32_t image_count = 0; - VkFormat image_format = VK_FORMAT_UNDEFINED; // The image format actually used when creating the swapchain. - VkColorSpaceKHR color_space = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR; // The color space actually used when creating the swapchain. +struct Swapchain +{ + VkDevice device = VK_NULL_HANDLE; + VkSwapchainKHR swapchain = VK_NULL_HANDLE; + uint32_t image_count = 0; + VkFormat image_format = VK_FORMAT_UNDEFINED; // The image format actually used when creating the swapchain. + VkColorSpaceKHR color_space = + VK_COLOR_SPACE_SRGB_NONLINEAR_KHR; // The color space actually used when creating the swapchain. VkImageUsageFlags image_usage_flags = 0; - VkExtent2D extent = { 0, 0 }; - // The value of minImageCount actually used when creating the swapchain; note that the presentation engine is always free to create more images than that. - uint32_t requested_min_image_count = 0; - VkPresentModeKHR present_mode = VK_PRESENT_MODE_IMMEDIATE_KHR; // The present mode actually used when creating the swapchain. - uint32_t instance_version = VKB_VK_API_VERSION_1_0; + VkExtent2D extent = { 0, 0 }; + // The value of minImageCount actually used when creating the swapchain; note that the presentation engine is always + // free to create more images than that. + uint32_t requested_min_image_count = 0; + VkPresentModeKHR present_mode = + VK_PRESENT_MODE_IMMEDIATE_KHR; // The present mode actually used when creating the swapchain. 
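To show how the Device and DeviceBuilder declarations above fit together, a brief sketch, not part of the patch; it assumes DeviceBuilder::build() (elided from this hunk) returns a gfxrecon::test::Device and throws on failure, and that <stdexcept> is available.

// Illustrative only: create the logical device and verify a graphics queue family exists.
gfxrecon::test::Device build_device(gfxrecon::test::PhysicalDevice const& physical_device)
{
    gfxrecon::test::DeviceBuilder device_builder(physical_device);
    gfxrecon::test::Device        device = device_builder.build();

    // get_queue_index returns an empty optional when no family of the requested type exists.
    auto graphics_queue_index = device.get_queue_index(gfxrecon::test::QueueType::graphics);
    if (!graphics_queue_index.has_value())
    {
        throw std::runtime_error("selected device has no graphics queue family");
    }
    return device;
}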
+ uint32_t instance_version = VKB_VK_API_VERSION_1_0; VkAllocationCallbacks* allocation_callbacks = VK_NULL_HANDLE; // Returns a vector of VkImage handles to the swapchain. @@ -757,18 +825,19 @@ struct Swapchain { // structure. std::vector get_image_views(); std::vector get_image_views(const void* pNext); - void destroy_image_views(std::vector const& image_views); + void destroy_image_views(std::vector const& image_views); // A conversion function which allows this Swapchain to be used // in places where VkSwapchainKHR would have been used. operator VkSwapchainKHR() const; private: - struct { + struct + { PFN_vkGetSwapchainImagesKHR fp_vkGetSwapchainImagesKHR = nullptr; - PFN_vkCreateImageView fp_vkCreateImageView = nullptr; - PFN_vkDestroyImageView fp_vkDestroyImageView = nullptr; - PFN_vkDestroySwapchainKHR fp_vkDestroySwapchainKHR = nullptr; + PFN_vkCreateImageView fp_vkCreateImageView = nullptr; + PFN_vkDestroyImageView fp_vkDestroyImageView = nullptr; + PFN_vkDestroySwapchainKHR fp_vkDestroySwapchainKHR = nullptr; } internal_table; friend class SwapchainBuilder; friend void destroy_swapchain(Swapchain const& swapchain); @@ -776,7 +845,8 @@ struct Swapchain { void destroy_swapchain(Swapchain const& swapchain); -class SwapchainBuilder { +class SwapchainBuilder +{ public: // Construct a SwapchainBuilder with a `gfxrecon::test::Device` explicit SwapchainBuilder(Device const& device); @@ -785,11 +855,11 @@ class SwapchainBuilder { // Construct a SwapchainBuilder with Vulkan handles for the physical device, device, and surface // Optionally can provide the uint32_t indices for the graphics and present queue // Note: The constructor will query the graphics & present queue if the indices are not provided - explicit SwapchainBuilder(VkPhysicalDevice const physical_device, - VkDevice const device, - VkSurfaceKHR const surface, + explicit SwapchainBuilder(VkPhysicalDevice const physical_device, + VkDevice const device, + VkSurfaceKHR const surface, std::optional graphics_queue_index = {}, - std::optional present_queue_index = {}); + std::optional present_queue_index = {}); Swapchain build() const; @@ -798,7 +868,6 @@ class SwapchainBuilder { SwapchainBuilder& set_old_swapchain(VkSwapchainKHR old_swapchain); SwapchainBuilder& set_old_swapchain(Swapchain const& swapchain); - // Desired size of the swapchain. By default, the swapchain will use the size // of the window being drawn to. SwapchainBuilder& set_desired_extent(uint32_t width, uint32_t height); @@ -820,7 +889,8 @@ class SwapchainBuilder { SwapchainBuilder& use_default_present_mode_selection(); // Set the bitmask of the image usage for acquired swapchain images. - // If the surface capabilities cannot allow it, building the swapchain will result in the `SwapchainError::required_usage_not_supported` error. + // If the surface capabilities cannot allow it, building the swapchain will result in the + // `SwapchainError::required_usage_not_supported` error. SwapchainBuilder& set_image_usage_flags(VkImageUsageFlags usage_flags); // Add a image usage to the bitmask for acquired swapchain images. SwapchainBuilder& add_image_usage_flags(VkImageUsageFlags usage_flags); @@ -834,7 +904,8 @@ class SwapchainBuilder { // Convenient named constants for passing to set_desired_min_image_count(). // Note that it is not an `enum class`, so its constants can be passed as an integer value without casting // In other words, these might as well be `static const int`, but they benefit from being grouped together this way. 
- enum BufferMode { + enum BufferMode + { SINGLE_BUFFERING = 1, DOUBLE_BUFFERING = 2, TRIPLE_BUFFERING = 3, @@ -843,13 +914,15 @@ class SwapchainBuilder { // Sets the desired minimum image count for the swapchain. // Note that the presentation engine is always free to create more images than requested. // You may pass one of the values specified in the BufferMode enum, or any integer value. - // For instance, if you pass DOUBLE_BUFFERING, the presentation engine is allowed to give you a double buffering setup, triple buffering, or more. This is up to the drivers. + // For instance, if you pass DOUBLE_BUFFERING, the presentation engine is allowed to give you a double buffering + // setup, triple buffering, or more. This is up to the drivers. SwapchainBuilder& set_desired_min_image_count(uint32_t min_image_count); // Sets a required minimum image count for the swapchain. - // If the surface capabilities cannot allow it, building the swapchain will result in the `SwapchainError::required_min_image_count_too_low` error. - // Otherwise, the same observations from set_desired_min_image_count() apply. - // A value of 0 is specially interpreted as meaning "no requirement", and is the behavior by default. + // If the surface capabilities cannot allow it, building the swapchain will result in the + // `SwapchainError::required_min_image_count_too_low` error. Otherwise, the same observations from + // set_desired_min_image_count() apply. A value of 0 is specially interpreted as meaning "no requirement", and is + // the behavior by default. SwapchainBuilder& set_required_min_image_count(uint32_t required_min_image_count); // Set whether the Vulkan implementation is allowed to discard rendering operations that @@ -863,12 +936,15 @@ class SwapchainBuilder { SwapchainBuilder& set_create_flags(VkSwapchainCreateFlagBitsKHR create_flags); // Set the transform to be applied, like a 90 degree rotation. Default is no transform. SwapchainBuilder& set_pre_transform_flags(VkSurfaceTransformFlagBitsKHR pre_transform_flags); - // Set the alpha channel to be used with other windows in on the system. Default is VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR. + // Set the alpha channel to be used with other windows in on the system. Default is + // VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR. SwapchainBuilder& set_composite_alpha_flags(VkCompositeAlphaFlagBitsKHR composite_alpha_flags); // Add a structure to the pNext chain of VkSwapchainCreateInfoKHR. // The structure must be valid when SwapchainBuilder::build() is called. 
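Putting the builder options above together, a short sketch of configuring and building a swapchain; build() is exercised by create_swapchain() earlier in this patch, and the format, present mode, and BufferMode value here are example choices only, not part of the patch.

// Illustrative only: build a swapchain with explicit format, present mode, and image count hints.
gfxrecon::test::Swapchain build_swapchain(gfxrecon::test::Device const& device, uint32_t width, uint32_t height)
{
    gfxrecon::test::SwapchainBuilder builder(device);
    return builder.set_desired_format({ VK_FORMAT_B8G8R8A8_SRGB, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR })
        .set_desired_present_mode(VK_PRESENT_MODE_FIFO_KHR)
        .set_desired_min_image_count(gfxrecon::test::SwapchainBuilder::DOUBLE_BUFFERING)
        .set_desired_extent(width, height)
        .build();
}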
- template SwapchainBuilder& add_pNext(T* structure) { + template + SwapchainBuilder& add_pNext(T* structure) + { info.pNext_chain.push_back(reinterpret_cast(structure)); return *this; } @@ -880,56 +956,58 @@ class SwapchainBuilder { void add_desired_formats(std::vector& formats) const; void add_desired_present_modes(std::vector& modes) const; - struct SwapchainInfo { - VkPhysicalDevice physical_device = VK_NULL_HANDLE; - VkDevice device = VK_NULL_HANDLE; + struct SwapchainInfo + { + VkPhysicalDevice physical_device = VK_NULL_HANDLE; + VkDevice device = VK_NULL_HANDLE; std::vector pNext_chain; - VkSwapchainCreateFlagBitsKHR create_flags = static_cast(0); - VkSurfaceKHR surface = VK_NULL_HANDLE; - std::vector desired_formats; - uint32_t instance_version = VKB_VK_API_VERSION_1_0; - uint32_t desired_width = 256; - uint32_t desired_height = 256; - uint32_t array_layer_count = 1; - uint32_t min_image_count = 0; - uint32_t required_min_image_count = 0; - VkImageUsageFlags image_usage_flags = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; - std::optional graphics_queue_index = {}; - std::optional present_queue_index = {}; - VkSurfaceTransformFlagBitsKHR pre_transform = static_cast(0); + VkSwapchainCreateFlagBitsKHR create_flags = static_cast(0); + VkSurfaceKHR surface = VK_NULL_HANDLE; + std::vector desired_formats; + uint32_t instance_version = VKB_VK_API_VERSION_1_0; + uint32_t desired_width = 256; + uint32_t desired_height = 256; + uint32_t array_layer_count = 1; + uint32_t min_image_count = 0; + uint32_t required_min_image_count = 0; + VkImageUsageFlags image_usage_flags = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; + std::optional graphics_queue_index = {}; + std::optional present_queue_index = {}; + VkSurfaceTransformFlagBitsKHR pre_transform = static_cast(0); #if defined(__ANDROID__) VkCompositeAlphaFlagBitsKHR composite_alpha = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR; #else VkCompositeAlphaFlagBitsKHR composite_alpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR; #endif std::vector desired_present_modes; - bool clipped = true; - VkSwapchainKHR old_swapchain = VK_NULL_HANDLE; - VkAllocationCallbacks* allocation_callbacks = VK_NULL_HANDLE; + bool clipped = true; + VkSwapchainKHR old_swapchain = VK_NULL_HANDLE; + VkAllocationCallbacks* allocation_callbacks = VK_NULL_HANDLE; } info; }; -SDL_Window* create_window_sdl(const char* window_name, bool resizable, int width, int height); -void destroy_window_sdl(SDL_Window * window); -VkSurfaceKHR create_surface_sdl(VkInstance instance, SDL_Window * window, VkAllocationCallbacks* allocator = nullptr); -void create_swapchain(SwapchainBuilder& swapchain_builder, Swapchain& swapchain); +SDL_Window* create_window_sdl(const char* window_name, bool resizable, int width, int height); +void destroy_window_sdl(SDL_Window* window); +VkSurfaceKHR create_surface_sdl(VkInstance instance, SDL_Window* window, VkAllocationCallbacks* allocator = nullptr); +void create_swapchain(SwapchainBuilder& swapchain_builder, Swapchain& swapchain); VkCommandPool create_command_pool(vkb::DispatchTable const& disp, uint32_t queue_family_index); -struct Sync { +struct Sync +{ std::vector available_semaphores; std::vector finished_semaphore; - std::vector in_flight_fences; - std::vector image_in_flight; + std::vector in_flight_fences; + std::vector image_in_flight; - Sync() = default; + Sync() = default; ~Sync() = default; - Sync(const Sync&) = delete; - Sync& operator =(const Sync&) = delete; + Sync(const Sync&) = delete; + Sync& operator=(const Sync&) = delete; - Sync(Sync&&) = default; - Sync& operator 
=(Sync&&) = default; + Sync(Sync&&) = default; + Sync& operator=(Sync&&) = default; }; Sync create_sync_objects(Swapchain const& swapchain, vkb::DispatchTable const& disp, const int max_frames_in_flight); @@ -940,19 +1018,24 @@ VkShaderModule createShaderModule(vkb::DispatchTable const& disp, const std::vec VkShaderModule readShaderFromFile(vkb::DispatchTable const& disp, const std::string& filename); -#define VERIFY_VK_RESULT(message, result) { if (result != VK_SUCCESS) throw gfxrecon::test::vulkan_exception(message, result); } +#define VERIFY_VK_RESULT(message, result) \ + { \ + if (result != VK_SUCCESS) \ + throw gfxrecon::test::vulkan_exception(message, result); \ + } -struct Init { - SDL_Window* window; - Instance instance; +struct Init +{ + SDL_Window* window; + Instance instance; vkb::InstanceDispatchTable inst_disp; - VkSurfaceKHR surface; - PhysicalDevice physical_device; - Device device; - vkb::DispatchTable disp; - Swapchain swapchain; - std::vector swapchain_images; - std::vector swapchain_image_views; + VkSurfaceKHR surface; + PhysicalDevice physical_device; + Device device; + vkb::DispatchTable disp; + Swapchain swapchain; + std::vector swapchain_images; + std::vector swapchain_image_views; }; Init device_initialization(const std::string& window_name); @@ -961,16 +1044,18 @@ void cleanup_init(Init& init); void recreate_init_swapchain(Init& init, bool wait_for_idle = true); -class TestAppBase { +class TestAppBase +{ public: void run(const std::string& window_name); + protected: - TestAppBase() = default; - ~TestAppBase() = default; - TestAppBase(const TestAppBase&) = delete; + TestAppBase() = default; + ~TestAppBase() = default; + TestAppBase(const TestAppBase&) = delete; TestAppBase& operator=(const TestAppBase&) = delete; - TestAppBase(TestAppBase&&) = delete; - TestAppBase& operator=(TestAppBase&&) = delete; + TestAppBase(TestAppBase&&) = delete; + TestAppBase& operator=(TestAppBase&&) = delete; void recreate_swapchain(bool wait_for_idle); diff --git a/test/test_apps/common/test_app_dispatch.h b/test/test_apps/common/test_app_dispatch.h index d14588a614..c5c0d60f77 100644 --- a/test/test_apps/common/test_app_dispatch.h +++ b/test/test_apps/common/test_app_dispatch.h @@ -5,1658 +5,2283 @@ * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated * documentation files (the “Software”), to deal in the Software without restriction, including without * limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies - * of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + * of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following + * conditions: * - * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the + * Software. * * THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT * LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, - * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE + * USE OR OTHER DEALINGS IN THE SOFTWARE. * */ // This file is a part of VkBootstrap // https://github.com/charles-lunarg/vk-bootstrap - #pragma once #include -namespace vkb { +namespace vkb +{ -struct InstanceDispatchTable { +struct InstanceDispatchTable +{ InstanceDispatchTable() = default; - InstanceDispatchTable(VkInstance instance, PFN_vkGetInstanceProcAddr procAddr) : instance(instance), populated(true) { + InstanceDispatchTable(VkInstance instance, PFN_vkGetInstanceProcAddr procAddr) : instance(instance), populated(true) + { fp_vkDestroyInstance = reinterpret_cast(procAddr(instance, "vkDestroyInstance")); - fp_vkEnumeratePhysicalDevices = reinterpret_cast(procAddr(instance, "vkEnumeratePhysicalDevices")); - fp_vkGetInstanceProcAddr = reinterpret_cast(procAddr(instance, "vkGetInstanceProcAddr")); - fp_vkGetPhysicalDeviceProperties = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceProperties")); - fp_vkGetPhysicalDeviceQueueFamilyProperties = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceQueueFamilyProperties")); - fp_vkGetPhysicalDeviceMemoryProperties = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceMemoryProperties")); - fp_vkGetPhysicalDeviceFeatures = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceFeatures")); - fp_vkGetPhysicalDeviceFormatProperties = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceFormatProperties")); - fp_vkGetPhysicalDeviceImageFormatProperties = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceImageFormatProperties")); - fp_vkEnumerateDeviceLayerProperties = reinterpret_cast(procAddr(instance, "vkEnumerateDeviceLayerProperties")); - fp_vkEnumerateDeviceExtensionProperties = reinterpret_cast(procAddr(instance, "vkEnumerateDeviceExtensionProperties")); - fp_vkGetPhysicalDeviceSparseImageFormatProperties = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSparseImageFormatProperties")); + fp_vkEnumeratePhysicalDevices = + reinterpret_cast(procAddr(instance, "vkEnumeratePhysicalDevices")); + fp_vkGetInstanceProcAddr = + reinterpret_cast(procAddr(instance, "vkGetInstanceProcAddr")); + fp_vkGetPhysicalDeviceProperties = + reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceProperties")); + fp_vkGetPhysicalDeviceQueueFamilyProperties = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceQueueFamilyProperties")); + fp_vkGetPhysicalDeviceMemoryProperties = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceMemoryProperties")); + fp_vkGetPhysicalDeviceFeatures = + reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceFeatures")); + fp_vkGetPhysicalDeviceFormatProperties = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceFormatProperties")); + fp_vkGetPhysicalDeviceImageFormatProperties = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceImageFormatProperties")); + fp_vkEnumerateDeviceLayerProperties = reinterpret_cast( + procAddr(instance, "vkEnumerateDeviceLayerProperties")); + fp_vkEnumerateDeviceExtensionProperties = reinterpret_cast( + procAddr(instance, "vkEnumerateDeviceExtensionProperties")); + fp_vkGetPhysicalDeviceSparseImageFormatProperties = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceSparseImageFormatProperties")); #if (defined(VK_KHR_android_surface)) - fp_vkCreateAndroidSurfaceKHR = reinterpret_cast(procAddr(instance, "vkCreateAndroidSurfaceKHR")); + fp_vkCreateAndroidSurfaceKHR = + reinterpret_cast(procAddr(instance, 
"vkCreateAndroidSurfaceKHR")); #endif #if (defined(VK_KHR_display)) - fp_vkGetPhysicalDeviceDisplayPropertiesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceDisplayPropertiesKHR")); + fp_vkGetPhysicalDeviceDisplayPropertiesKHR = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceDisplayPropertiesKHR")); #endif #if (defined(VK_KHR_display)) - fp_vkGetPhysicalDeviceDisplayPlanePropertiesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR")); + fp_vkGetPhysicalDeviceDisplayPlanePropertiesKHR = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR")); #endif #if (defined(VK_KHR_display)) - fp_vkGetDisplayPlaneSupportedDisplaysKHR = reinterpret_cast(procAddr(instance, "vkGetDisplayPlaneSupportedDisplaysKHR")); + fp_vkGetDisplayPlaneSupportedDisplaysKHR = reinterpret_cast( + procAddr(instance, "vkGetDisplayPlaneSupportedDisplaysKHR")); #endif #if (defined(VK_KHR_display)) - fp_vkGetDisplayModePropertiesKHR = reinterpret_cast(procAddr(instance, "vkGetDisplayModePropertiesKHR")); + fp_vkGetDisplayModePropertiesKHR = + reinterpret_cast(procAddr(instance, "vkGetDisplayModePropertiesKHR")); #endif #if (defined(VK_KHR_display)) - fp_vkCreateDisplayModeKHR = reinterpret_cast(procAddr(instance, "vkCreateDisplayModeKHR")); + fp_vkCreateDisplayModeKHR = + reinterpret_cast(procAddr(instance, "vkCreateDisplayModeKHR")); #endif #if (defined(VK_KHR_display)) - fp_vkGetDisplayPlaneCapabilitiesKHR = reinterpret_cast(procAddr(instance, "vkGetDisplayPlaneCapabilitiesKHR")); + fp_vkGetDisplayPlaneCapabilitiesKHR = reinterpret_cast( + procAddr(instance, "vkGetDisplayPlaneCapabilitiesKHR")); #endif #if (defined(VK_KHR_display)) - fp_vkCreateDisplayPlaneSurfaceKHR = reinterpret_cast(procAddr(instance, "vkCreateDisplayPlaneSurfaceKHR")); + fp_vkCreateDisplayPlaneSurfaceKHR = + reinterpret_cast(procAddr(instance, "vkCreateDisplayPlaneSurfaceKHR")); #endif #if (defined(VK_KHR_surface)) fp_vkDestroySurfaceKHR = reinterpret_cast(procAddr(instance, "vkDestroySurfaceKHR")); #endif #if (defined(VK_KHR_surface)) - fp_vkGetPhysicalDeviceSurfaceSupportKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSurfaceSupportKHR")); + fp_vkGetPhysicalDeviceSurfaceSupportKHR = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceSurfaceSupportKHR")); #endif #if (defined(VK_KHR_surface)) - fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR")); + fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR")); #endif #if (defined(VK_KHR_surface)) - fp_vkGetPhysicalDeviceSurfaceFormatsKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSurfaceFormatsKHR")); + fp_vkGetPhysicalDeviceSurfaceFormatsKHR = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceSurfaceFormatsKHR")); #endif #if (defined(VK_KHR_surface)) - fp_vkGetPhysicalDeviceSurfacePresentModesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSurfacePresentModesKHR")); + fp_vkGetPhysicalDeviceSurfacePresentModesKHR = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceSurfacePresentModesKHR")); #endif #if (defined(VK_NN_vi_surface)) fp_vkCreateViSurfaceNN = reinterpret_cast(procAddr(instance, "vkCreateViSurfaceNN")); #endif #if (defined(VK_KHR_wayland_surface)) - fp_vkCreateWaylandSurfaceKHR = reinterpret_cast(procAddr(instance, "vkCreateWaylandSurfaceKHR")); + fp_vkCreateWaylandSurfaceKHR = + 
reinterpret_cast(procAddr(instance, "vkCreateWaylandSurfaceKHR")); #endif #if (defined(VK_KHR_wayland_surface)) - fp_vkGetPhysicalDeviceWaylandPresentationSupportKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR")); + fp_vkGetPhysicalDeviceWaylandPresentationSupportKHR = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR")); #endif #if (defined(VK_KHR_win32_surface)) - fp_vkCreateWin32SurfaceKHR = reinterpret_cast(procAddr(instance, "vkCreateWin32SurfaceKHR")); + fp_vkCreateWin32SurfaceKHR = + reinterpret_cast(procAddr(instance, "vkCreateWin32SurfaceKHR")); #endif #if (defined(VK_KHR_win32_surface)) - fp_vkGetPhysicalDeviceWin32PresentationSupportKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR")); + fp_vkGetPhysicalDeviceWin32PresentationSupportKHR = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR")); #endif #if (defined(VK_KHR_xlib_surface)) - fp_vkCreateXlibSurfaceKHR = reinterpret_cast(procAddr(instance, "vkCreateXlibSurfaceKHR")); + fp_vkCreateXlibSurfaceKHR = + reinterpret_cast(procAddr(instance, "vkCreateXlibSurfaceKHR")); #endif #if (defined(VK_KHR_xlib_surface)) - fp_vkGetPhysicalDeviceXlibPresentationSupportKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR")); + fp_vkGetPhysicalDeviceXlibPresentationSupportKHR = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR")); #endif #if (defined(VK_KHR_xcb_surface)) - fp_vkCreateXcbSurfaceKHR = reinterpret_cast(procAddr(instance, "vkCreateXcbSurfaceKHR")); + fp_vkCreateXcbSurfaceKHR = + reinterpret_cast(procAddr(instance, "vkCreateXcbSurfaceKHR")); #endif #if (defined(VK_KHR_xcb_surface)) - fp_vkGetPhysicalDeviceXcbPresentationSupportKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR")); + fp_vkGetPhysicalDeviceXcbPresentationSupportKHR = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR")); #endif #if (defined(VK_EXT_directfb_surface)) - fp_vkCreateDirectFBSurfaceEXT = reinterpret_cast(procAddr(instance, "vkCreateDirectFBSurfaceEXT")); + fp_vkCreateDirectFBSurfaceEXT = + reinterpret_cast(procAddr(instance, "vkCreateDirectFBSurfaceEXT")); #endif #if (defined(VK_EXT_directfb_surface)) - fp_vkGetPhysicalDeviceDirectFBPresentationSupportEXT = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT")); + fp_vkGetPhysicalDeviceDirectFBPresentationSupportEXT = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT")); #endif #if (defined(VK_FUCHSIA_imagepipe_surface)) - fp_vkCreateImagePipeSurfaceFUCHSIA = reinterpret_cast(procAddr(instance, "vkCreateImagePipeSurfaceFUCHSIA")); + fp_vkCreateImagePipeSurfaceFUCHSIA = reinterpret_cast( + procAddr(instance, "vkCreateImagePipeSurfaceFUCHSIA")); #endif #if (defined(VK_GGP_stream_descriptor_surface)) - fp_vkCreateStreamDescriptorSurfaceGGP = reinterpret_cast(procAddr(instance, "vkCreateStreamDescriptorSurfaceGGP")); + fp_vkCreateStreamDescriptorSurfaceGGP = reinterpret_cast( + procAddr(instance, "vkCreateStreamDescriptorSurfaceGGP")); #endif #if (defined(VK_QNX_screen_surface)) - fp_vkCreateScreenSurfaceQNX = reinterpret_cast(procAddr(instance, "vkCreateScreenSurfaceQNX")); + fp_vkCreateScreenSurfaceQNX = + reinterpret_cast(procAddr(instance, "vkCreateScreenSurfaceQNX")); #endif #if (defined(VK_QNX_screen_surface)) - 
fp_vkGetPhysicalDeviceScreenPresentationSupportQNX = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX")); + fp_vkGetPhysicalDeviceScreenPresentationSupportQNX = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX")); #endif #if (defined(VK_EXT_debug_report)) - fp_vkCreateDebugReportCallbackEXT = reinterpret_cast(procAddr(instance, "vkCreateDebugReportCallbackEXT")); + fp_vkCreateDebugReportCallbackEXT = + reinterpret_cast(procAddr(instance, "vkCreateDebugReportCallbackEXT")); #endif #if (defined(VK_EXT_debug_report)) - fp_vkDestroyDebugReportCallbackEXT = reinterpret_cast(procAddr(instance, "vkDestroyDebugReportCallbackEXT")); + fp_vkDestroyDebugReportCallbackEXT = reinterpret_cast( + procAddr(instance, "vkDestroyDebugReportCallbackEXT")); #endif #if (defined(VK_EXT_debug_report)) - fp_vkDebugReportMessageEXT = reinterpret_cast(procAddr(instance, "vkDebugReportMessageEXT")); + fp_vkDebugReportMessageEXT = + reinterpret_cast(procAddr(instance, "vkDebugReportMessageEXT")); #endif #if (defined(VK_NV_external_memory_capabilities)) - fp_vkGetPhysicalDeviceExternalImageFormatPropertiesNV = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV")); + fp_vkGetPhysicalDeviceExternalImageFormatPropertiesNV = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV")); #endif #if (defined(VK_VERSION_1_1)) - fp_vkGetPhysicalDeviceFeatures2 = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceFeatures2")); + fp_vkGetPhysicalDeviceFeatures2 = + reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceFeatures2")); #endif #if (defined(VK_VERSION_1_1)) - fp_vkGetPhysicalDeviceProperties2 = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceProperties2")); + fp_vkGetPhysicalDeviceProperties2 = + reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceProperties2")); #endif #if (defined(VK_VERSION_1_1)) - fp_vkGetPhysicalDeviceFormatProperties2 = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceFormatProperties2")); + fp_vkGetPhysicalDeviceFormatProperties2 = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceFormatProperties2")); #endif #if (defined(VK_VERSION_1_1)) - fp_vkGetPhysicalDeviceImageFormatProperties2 = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceImageFormatProperties2")); + fp_vkGetPhysicalDeviceImageFormatProperties2 = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceImageFormatProperties2")); #endif #if (defined(VK_VERSION_1_1)) - fp_vkGetPhysicalDeviceQueueFamilyProperties2 = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceQueueFamilyProperties2")); + fp_vkGetPhysicalDeviceQueueFamilyProperties2 = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceQueueFamilyProperties2")); #endif #if (defined(VK_VERSION_1_1)) - fp_vkGetPhysicalDeviceMemoryProperties2 = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceMemoryProperties2")); + fp_vkGetPhysicalDeviceMemoryProperties2 = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceMemoryProperties2")); #endif #if (defined(VK_VERSION_1_1)) - fp_vkGetPhysicalDeviceSparseImageFormatProperties2 = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSparseImageFormatProperties2")); + fp_vkGetPhysicalDeviceSparseImageFormatProperties2 = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceSparseImageFormatProperties2")); #endif #if (defined(VK_VERSION_1_1)) - fp_vkGetPhysicalDeviceExternalBufferProperties = 
reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceExternalBufferProperties")); + fp_vkGetPhysicalDeviceExternalBufferProperties = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceExternalBufferProperties")); #endif #if (defined(VK_NV_external_memory_sci_buf)) - fp_vkGetPhysicalDeviceExternalMemorySciBufPropertiesNV = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceExternalMemorySciBufPropertiesNV")); + fp_vkGetPhysicalDeviceExternalMemorySciBufPropertiesNV = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceExternalMemorySciBufPropertiesNV")); #endif #if (defined(VK_NV_external_memory_sci_buf)) - fp_vkGetPhysicalDeviceSciBufAttributesNV = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSciBufAttributesNV")); + fp_vkGetPhysicalDeviceSciBufAttributesNV = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceSciBufAttributesNV")); #endif #if (defined(VK_VERSION_1_1)) - fp_vkGetPhysicalDeviceExternalSemaphoreProperties = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceExternalSemaphoreProperties")); + fp_vkGetPhysicalDeviceExternalSemaphoreProperties = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceExternalSemaphoreProperties")); #endif #if (defined(VK_VERSION_1_1)) - fp_vkGetPhysicalDeviceExternalFenceProperties = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceExternalFenceProperties")); + fp_vkGetPhysicalDeviceExternalFenceProperties = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceExternalFenceProperties")); #endif #if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) - fp_vkGetPhysicalDeviceSciSyncAttributesNV = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSciSyncAttributesNV")); + fp_vkGetPhysicalDeviceSciSyncAttributesNV = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceSciSyncAttributesNV")); #endif #if (defined(VK_EXT_direct_mode_display)) fp_vkReleaseDisplayEXT = reinterpret_cast(procAddr(instance, "vkReleaseDisplayEXT")); #endif #if (defined(VK_EXT_acquire_xlib_display)) - fp_vkAcquireXlibDisplayEXT = reinterpret_cast(procAddr(instance, "vkAcquireXlibDisplayEXT")); + fp_vkAcquireXlibDisplayEXT = + reinterpret_cast(procAddr(instance, "vkAcquireXlibDisplayEXT")); #endif #if (defined(VK_EXT_acquire_xlib_display)) - fp_vkGetRandROutputDisplayEXT = reinterpret_cast(procAddr(instance, "vkGetRandROutputDisplayEXT")); + fp_vkGetRandROutputDisplayEXT = + reinterpret_cast(procAddr(instance, "vkGetRandROutputDisplayEXT")); #endif #if (defined(VK_NV_acquire_winrt_display)) - fp_vkAcquireWinrtDisplayNV = reinterpret_cast(procAddr(instance, "vkAcquireWinrtDisplayNV")); + fp_vkAcquireWinrtDisplayNV = + reinterpret_cast(procAddr(instance, "vkAcquireWinrtDisplayNV")); #endif #if (defined(VK_NV_acquire_winrt_display)) fp_vkGetWinrtDisplayNV = reinterpret_cast(procAddr(instance, "vkGetWinrtDisplayNV")); #endif #if (defined(VK_EXT_display_surface_counter)) - fp_vkGetPhysicalDeviceSurfaceCapabilities2EXT = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT")); + fp_vkGetPhysicalDeviceSurfaceCapabilities2EXT = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT")); #endif #if (defined(VK_VERSION_1_1)) - fp_vkEnumeratePhysicalDeviceGroups = reinterpret_cast(procAddr(instance, "vkEnumeratePhysicalDeviceGroups")); + fp_vkEnumeratePhysicalDeviceGroups = reinterpret_cast( + procAddr(instance, "vkEnumeratePhysicalDeviceGroups")); #endif #if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group)) - 
fp_vkGetPhysicalDevicePresentRectanglesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDevicePresentRectanglesKHR")); + fp_vkGetPhysicalDevicePresentRectanglesKHR = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDevicePresentRectanglesKHR")); #endif #if (defined(VK_MVK_ios_surface)) - fp_vkCreateIOSSurfaceMVK = reinterpret_cast(procAddr(instance, "vkCreateIOSSurfaceMVK")); + fp_vkCreateIOSSurfaceMVK = + reinterpret_cast(procAddr(instance, "vkCreateIOSSurfaceMVK")); #endif #if (defined(VK_MVK_macos_surface)) - fp_vkCreateMacOSSurfaceMVK = reinterpret_cast(procAddr(instance, "vkCreateMacOSSurfaceMVK")); + fp_vkCreateMacOSSurfaceMVK = + reinterpret_cast(procAddr(instance, "vkCreateMacOSSurfaceMVK")); #endif #if (defined(VK_EXT_metal_surface)) - fp_vkCreateMetalSurfaceEXT = reinterpret_cast(procAddr(instance, "vkCreateMetalSurfaceEXT")); + fp_vkCreateMetalSurfaceEXT = + reinterpret_cast(procAddr(instance, "vkCreateMetalSurfaceEXT")); #endif #if (defined(VK_EXT_sample_locations)) - fp_vkGetPhysicalDeviceMultisamplePropertiesEXT = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT")); + fp_vkGetPhysicalDeviceMultisamplePropertiesEXT = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT")); #endif #if (defined(VK_KHR_get_surface_capabilities2)) - fp_vkGetPhysicalDeviceSurfaceCapabilities2KHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR")); + fp_vkGetPhysicalDeviceSurfaceCapabilities2KHR = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR")); #endif #if (defined(VK_KHR_get_surface_capabilities2)) - fp_vkGetPhysicalDeviceSurfaceFormats2KHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSurfaceFormats2KHR")); + fp_vkGetPhysicalDeviceSurfaceFormats2KHR = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceSurfaceFormats2KHR")); #endif #if (defined(VK_KHR_get_display_properties2)) - fp_vkGetPhysicalDeviceDisplayProperties2KHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceDisplayProperties2KHR")); + fp_vkGetPhysicalDeviceDisplayProperties2KHR = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceDisplayProperties2KHR")); #endif #if (defined(VK_KHR_get_display_properties2)) - fp_vkGetPhysicalDeviceDisplayPlaneProperties2KHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR")); + fp_vkGetPhysicalDeviceDisplayPlaneProperties2KHR = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR")); #endif #if (defined(VK_KHR_get_display_properties2)) - fp_vkGetDisplayModeProperties2KHR = reinterpret_cast(procAddr(instance, "vkGetDisplayModeProperties2KHR")); + fp_vkGetDisplayModeProperties2KHR = + reinterpret_cast(procAddr(instance, "vkGetDisplayModeProperties2KHR")); #endif #if (defined(VK_KHR_get_display_properties2)) - fp_vkGetDisplayPlaneCapabilities2KHR = reinterpret_cast(procAddr(instance, "vkGetDisplayPlaneCapabilities2KHR")); + fp_vkGetDisplayPlaneCapabilities2KHR = reinterpret_cast( + procAddr(instance, "vkGetDisplayPlaneCapabilities2KHR")); #endif #if (defined(VK_KHR_calibrated_timestamps)) - fp_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsKHR")); + fp_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsKHR")); #endif #if (defined(VK_EXT_debug_utils)) - fp_vkCreateDebugUtilsMessengerEXT = 
reinterpret_cast(procAddr(instance, "vkCreateDebugUtilsMessengerEXT")); + fp_vkCreateDebugUtilsMessengerEXT = + reinterpret_cast(procAddr(instance, "vkCreateDebugUtilsMessengerEXT")); #endif #if (defined(VK_EXT_debug_utils)) - fp_vkDestroyDebugUtilsMessengerEXT = reinterpret_cast(procAddr(instance, "vkDestroyDebugUtilsMessengerEXT")); + fp_vkDestroyDebugUtilsMessengerEXT = reinterpret_cast( + procAddr(instance, "vkDestroyDebugUtilsMessengerEXT")); #endif #if (defined(VK_EXT_debug_utils)) - fp_vkSubmitDebugUtilsMessageEXT = reinterpret_cast(procAddr(instance, "vkSubmitDebugUtilsMessageEXT")); + fp_vkSubmitDebugUtilsMessageEXT = + reinterpret_cast(procAddr(instance, "vkSubmitDebugUtilsMessageEXT")); #endif #if (defined(VK_NV_cooperative_matrix)) - fp_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV")); + fp_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV")); #endif #if (defined(VK_EXT_full_screen_exclusive)) - fp_vkGetPhysicalDeviceSurfacePresentModes2EXT = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT")); + fp_vkGetPhysicalDeviceSurfacePresentModes2EXT = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT")); #endif #if (defined(VK_KHR_performance_query)) - fp_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = reinterpret_cast(procAddr(instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR")); + fp_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = + reinterpret_cast( + procAddr(instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR")); #endif #if (defined(VK_KHR_performance_query)) - fp_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR")); + fp_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR")); #endif #if (defined(VK_EXT_headless_surface)) - fp_vkCreateHeadlessSurfaceEXT = reinterpret_cast(procAddr(instance, "vkCreateHeadlessSurfaceEXT")); + fp_vkCreateHeadlessSurfaceEXT = + reinterpret_cast(procAddr(instance, "vkCreateHeadlessSurfaceEXT")); #endif #if (defined(VK_NV_coverage_reduction_mode)) - fp_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV")); + fp_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV")); #endif #if (defined(VK_VERSION_1_3)) - fp_vkGetPhysicalDeviceToolProperties = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceToolProperties")); + fp_vkGetPhysicalDeviceToolProperties = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceToolProperties")); #endif #if (defined(VK_KHR_object_refresh)) - fp_vkGetPhysicalDeviceRefreshableObjectTypesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceRefreshableObjectTypesKHR")); + fp_vkGetPhysicalDeviceRefreshableObjectTypesKHR = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceRefreshableObjectTypesKHR")); #endif #if (defined(VK_KHR_fragment_shading_rate)) - fp_vkGetPhysicalDeviceFragmentShadingRatesKHR = reinterpret_cast(procAddr(instance, 
"vkGetPhysicalDeviceFragmentShadingRatesKHR")); + fp_vkGetPhysicalDeviceFragmentShadingRatesKHR = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR")); #endif #if (defined(VK_KHR_video_queue)) - fp_vkGetPhysicalDeviceVideoCapabilitiesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR")); + fp_vkGetPhysicalDeviceVideoCapabilitiesKHR = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR")); #endif #if (defined(VK_KHR_video_queue)) - fp_vkGetPhysicalDeviceVideoFormatPropertiesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR")); + fp_vkGetPhysicalDeviceVideoFormatPropertiesKHR = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR")); #endif #if (defined(VK_KHR_video_encode_queue)) - fp_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR")); + fp_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR")); #endif #if (defined(VK_EXT_acquire_drm_display)) - fp_vkAcquireDrmDisplayEXT = reinterpret_cast(procAddr(instance, "vkAcquireDrmDisplayEXT")); + fp_vkAcquireDrmDisplayEXT = + reinterpret_cast(procAddr(instance, "vkAcquireDrmDisplayEXT")); #endif #if (defined(VK_EXT_acquire_drm_display)) fp_vkGetDrmDisplayEXT = reinterpret_cast(procAddr(instance, "vkGetDrmDisplayEXT")); #endif #if (defined(VK_NV_optical_flow)) - fp_vkGetPhysicalDeviceOpticalFlowImageFormatsNV = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV")); + fp_vkGetPhysicalDeviceOpticalFlowImageFormatsNV = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV")); #endif #if (defined(VK_KHR_cooperative_matrix)) - fp_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR")); + fp_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR")); #endif #if (defined(VK_KHR_get_physical_device_properties2)) - fp_vkGetPhysicalDeviceFeatures2KHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceFeatures2KHR")); + fp_vkGetPhysicalDeviceFeatures2KHR = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceFeatures2KHR")); #endif #if (defined(VK_KHR_get_physical_device_properties2)) - fp_vkGetPhysicalDeviceProperties2KHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceProperties2KHR")); + fp_vkGetPhysicalDeviceProperties2KHR = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceProperties2KHR")); #endif #if (defined(VK_KHR_get_physical_device_properties2)) - fp_vkGetPhysicalDeviceFormatProperties2KHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceFormatProperties2KHR")); + fp_vkGetPhysicalDeviceFormatProperties2KHR = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceFormatProperties2KHR")); #endif #if (defined(VK_KHR_get_physical_device_properties2)) - fp_vkGetPhysicalDeviceImageFormatProperties2KHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceImageFormatProperties2KHR")); + fp_vkGetPhysicalDeviceImageFormatProperties2KHR = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceImageFormatProperties2KHR")); #endif #if (defined(VK_KHR_get_physical_device_properties2)) - 
fp_vkGetPhysicalDeviceQueueFamilyProperties2KHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR")); + fp_vkGetPhysicalDeviceQueueFamilyProperties2KHR = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR")); #endif #if (defined(VK_KHR_get_physical_device_properties2)) - fp_vkGetPhysicalDeviceMemoryProperties2KHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceMemoryProperties2KHR")); + fp_vkGetPhysicalDeviceMemoryProperties2KHR = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceMemoryProperties2KHR")); #endif #if (defined(VK_KHR_get_physical_device_properties2)) - fp_vkGetPhysicalDeviceSparseImageFormatProperties2KHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR")); + fp_vkGetPhysicalDeviceSparseImageFormatProperties2KHR = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR")); #endif #if (defined(VK_KHR_external_memory_capabilities)) - fp_vkGetPhysicalDeviceExternalBufferPropertiesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR")); + fp_vkGetPhysicalDeviceExternalBufferPropertiesKHR = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR")); #endif #if (defined(VK_KHR_external_semaphore_capabilities)) - fp_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR")); + fp_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR")); #endif #if (defined(VK_KHR_external_fence_capabilities)) - fp_vkGetPhysicalDeviceExternalFencePropertiesKHR = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR")); + fp_vkGetPhysicalDeviceExternalFencePropertiesKHR = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR")); #endif #if (defined(VK_KHR_device_group_creation)) - fp_vkEnumeratePhysicalDeviceGroupsKHR = reinterpret_cast(procAddr(instance, "vkEnumeratePhysicalDeviceGroupsKHR")); + fp_vkEnumeratePhysicalDeviceGroupsKHR = reinterpret_cast( + procAddr(instance, "vkEnumeratePhysicalDeviceGroupsKHR")); #endif #if (defined(VK_EXT_calibrated_timestamps)) - fp_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT")); + fp_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = + reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT")); #endif #if (defined(VK_EXT_tooling_info)) - fp_vkGetPhysicalDeviceToolPropertiesEXT = reinterpret_cast(procAddr(instance, "vkGetPhysicalDeviceToolPropertiesEXT")); + fp_vkGetPhysicalDeviceToolPropertiesEXT = reinterpret_cast( + procAddr(instance, "vkGetPhysicalDeviceToolPropertiesEXT")); #endif } - void destroyInstance(const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyInstance(const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyInstance(instance, pAllocator); } - VkResult enumeratePhysicalDevices(uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices) const noexcept { + VkResult enumeratePhysicalDevices(uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices) const noexcept + { return fp_vkEnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices); } - PFN_vkVoidFunction getInstanceProcAddr(const char* pName) const 
noexcept { + PFN_vkVoidFunction getInstanceProcAddr(const char* pName) const noexcept + { return fp_vkGetInstanceProcAddr(instance, pName); } - void getPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties) const noexcept { + void getPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties* pProperties) const noexcept + { fp_vkGetPhysicalDeviceProperties(physicalDevice, pProperties); } - void getPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties) const noexcept { + void getPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties* pQueueFamilyProperties) const noexcept + { fp_vkGetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); } - void getPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties) const noexcept { + void getPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties* pMemoryProperties) const noexcept + { fp_vkGetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties); } - void getPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) const noexcept { + void getPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) const noexcept + { fp_vkGetPhysicalDeviceFeatures(physicalDevice, pFeatures); } - void getPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) const noexcept { + void getPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties* pFormatProperties) const noexcept + { fp_vkGetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties); } - VkResult getPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) const noexcept { - return fp_vkGetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties); - } - VkResult enumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties) const noexcept { + VkResult getPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkImageFormatProperties* pImageFormatProperties) const noexcept + { + return fp_vkGetPhysicalDeviceImageFormatProperties( + physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties); + } + VkResult enumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkLayerProperties* pProperties) const noexcept + { return fp_vkEnumerateDeviceLayerProperties(physicalDevice, pPropertyCount, pProperties); } - VkResult enumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties) const noexcept { + VkResult enumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, + const char* pLayerName, + uint32_t* pPropertyCount, + VkExtensionProperties* pProperties) 
const noexcept + { return fp_vkEnumerateDeviceExtensionProperties(physicalDevice, pLayerName, pPropertyCount, pProperties); } - void getPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties) const noexcept { - fp_vkGetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties); + void getPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkSampleCountFlagBits samples, + VkImageUsageFlags usage, + VkImageTiling tiling, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties* pProperties) const noexcept + { + fp_vkGetPhysicalDeviceSparseImageFormatProperties( + physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties); } #if (defined(VK_KHR_android_surface)) - VkResult createAndroidSurfaceKHR(const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + VkResult createAndroidSurfaceKHR(const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface) const noexcept + { return fp_vkCreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); } #endif #if (defined(VK_KHR_display)) - VkResult getPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties) const noexcept { + VkResult getPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPropertiesKHR* pProperties) const noexcept + { return fp_vkGetPhysicalDeviceDisplayPropertiesKHR(physicalDevice, pPropertyCount, pProperties); } #endif #if (defined(VK_KHR_display)) - VkResult getPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties) const noexcept { + VkResult getPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPlanePropertiesKHR* pProperties) const noexcept + { return fp_vkGetPhysicalDeviceDisplayPlanePropertiesKHR(physicalDevice, pPropertyCount, pProperties); } #endif #if (defined(VK_KHR_display)) - VkResult getDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays) const noexcept { + VkResult getDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, + uint32_t planeIndex, + uint32_t* pDisplayCount, + VkDisplayKHR* pDisplays) const noexcept + { return fp_vkGetDisplayPlaneSupportedDisplaysKHR(physicalDevice, planeIndex, pDisplayCount, pDisplays); } #endif #if (defined(VK_KHR_display)) - VkResult getDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties) const noexcept { + VkResult getDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t* pPropertyCount, + VkDisplayModePropertiesKHR* pProperties) const noexcept + { return fp_vkGetDisplayModePropertiesKHR(physicalDevice, display, pPropertyCount, pProperties); } #endif #if (defined(VK_KHR_display)) - VkResult createDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* 
pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode) const noexcept { + VkResult createDisplayModeKHR(VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + const VkDisplayModeCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDisplayModeKHR* pMode) const noexcept + { return fp_vkCreateDisplayModeKHR(physicalDevice, display, pCreateInfo, pAllocator, pMode); } #endif #if (defined(VK_KHR_display)) - VkResult getDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities) const noexcept { + VkResult getDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, + VkDisplayModeKHR mode, + uint32_t planeIndex, + VkDisplayPlaneCapabilitiesKHR* pCapabilities) const noexcept + { return fp_vkGetDisplayPlaneCapabilitiesKHR(physicalDevice, mode, planeIndex, pCapabilities); } #endif #if (defined(VK_KHR_display)) - VkResult createDisplayPlaneSurfaceKHR(const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + VkResult createDisplayPlaneSurfaceKHR(const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface) const noexcept + { return fp_vkCreateDisplayPlaneSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); } #endif #if (defined(VK_KHR_surface)) - void destroySurfaceKHR(VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroySurfaceKHR(VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroySurfaceKHR(instance, surface, pAllocator); } #endif #if (defined(VK_KHR_surface)) - VkResult getPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported) const noexcept { + VkResult getPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + VkSurfaceKHR surface, + VkBool32* pSupported) const noexcept + { return fp_vkGetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported); } #endif #if (defined(VK_KHR_surface)) - VkResult getPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities) const noexcept { + VkResult getPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilitiesKHR* pSurfaceCapabilities) const noexcept + { return fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities); } #endif #if (defined(VK_KHR_surface)) - VkResult getPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats) const noexcept { + VkResult getPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pSurfaceFormatCount, + VkSurfaceFormatKHR* pSurfaceFormats) const noexcept + { return fp_vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats); } #endif #if (defined(VK_KHR_surface)) - VkResult getPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes) const noexcept { + VkResult getPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* 
pPresentModeCount, + VkPresentModeKHR* pPresentModes) const noexcept + { return fp_vkGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes); } #endif #if (defined(VK_NN_vi_surface)) - VkResult createViSurfaceNN(const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + VkResult createViSurfaceNN(const VkViSurfaceCreateInfoNN* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface) const noexcept + { return fp_vkCreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface); } #endif #if (defined(VK_KHR_wayland_surface)) - VkResult createWaylandSurfaceKHR(const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + VkResult createWaylandSurfaceKHR(const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface) const noexcept + { return fp_vkCreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); } #endif #if (defined(VK_KHR_wayland_surface)) - VkBool32 getPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display) const noexcept { + VkBool32 getPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct wl_display* display) const noexcept + { return fp_vkGetPhysicalDeviceWaylandPresentationSupportKHR(physicalDevice, queueFamilyIndex, display); } #endif #if (defined(VK_KHR_win32_surface)) - VkResult createWin32SurfaceKHR(const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + VkResult createWin32SurfaceKHR(const VkWin32SurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface) const noexcept + { return fp_vkCreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); } #endif #if (defined(VK_KHR_win32_surface)) - VkBool32 getPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex) const noexcept { + VkBool32 getPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex) const noexcept + { return fp_vkGetPhysicalDeviceWin32PresentationSupportKHR(physicalDevice, queueFamilyIndex); } #endif #if (defined(VK_KHR_xlib_surface)) - VkResult createXlibSurfaceKHR(const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + VkResult createXlibSurfaceKHR(const VkXlibSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface) const noexcept + { return fp_vkCreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); } #endif #if (defined(VK_KHR_xlib_surface)) - VkBool32 getPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID) const noexcept { + VkBool32 getPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + Display* dpy, + VisualID visualID) const noexcept + { return fp_vkGetPhysicalDeviceXlibPresentationSupportKHR(physicalDevice, queueFamilyIndex, dpy, visualID); } #endif #if (defined(VK_KHR_xcb_surface)) - VkResult createXcbSurfaceKHR(const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* 
pSurface) const noexcept { + VkResult createXcbSurfaceKHR(const VkXcbSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface) const noexcept + { return fp_vkCreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); } #endif #if (defined(VK_KHR_xcb_surface)) - VkBool32 getPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id) const noexcept { + VkBool32 getPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + xcb_connection_t* connection, + xcb_visualid_t visual_id) const noexcept + { return fp_vkGetPhysicalDeviceXcbPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection, visual_id); } #endif #if (defined(VK_EXT_directfb_surface)) - VkResult createDirectFBSurfaceEXT(const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + VkResult createDirectFBSurfaceEXT(const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface) const noexcept + { return fp_vkCreateDirectFBSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface); } #endif #if (defined(VK_EXT_directfb_surface)) - VkBool32 getPhysicalDeviceDirectFBPresentationSupportEXT(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB* dfb) const noexcept { + VkBool32 getPhysicalDeviceDirectFBPresentationSupportEXT(VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + IDirectFB* dfb) const noexcept + { return fp_vkGetPhysicalDeviceDirectFBPresentationSupportEXT(physicalDevice, queueFamilyIndex, dfb); } #endif #if (defined(VK_FUCHSIA_imagepipe_surface)) - VkResult createImagePipeSurfaceFUCHSIA(const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + VkResult createImagePipeSurfaceFUCHSIA(const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface) const noexcept + { return fp_vkCreateImagePipeSurfaceFUCHSIA(instance, pCreateInfo, pAllocator, pSurface); } #endif #if (defined(VK_GGP_stream_descriptor_surface)) - VkResult createStreamDescriptorSurfaceGGP(const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + VkResult createStreamDescriptorSurfaceGGP(const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface) const noexcept + { return fp_vkCreateStreamDescriptorSurfaceGGP(instance, pCreateInfo, pAllocator, pSurface); } #endif #if (defined(VK_QNX_screen_surface)) - VkResult createScreenSurfaceQNX(const VkScreenSurfaceCreateInfoQNX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + VkResult createScreenSurfaceQNX(const VkScreenSurfaceCreateInfoQNX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface) const noexcept + { return fp_vkCreateScreenSurfaceQNX(instance, pCreateInfo, pAllocator, pSurface); } #endif #if (defined(VK_QNX_screen_surface)) - VkBool32 getPhysicalDeviceScreenPresentationSupportQNX(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct _screen_window* window) const noexcept { + VkBool32 getPhysicalDeviceScreenPresentationSupportQNX(VkPhysicalDevice physicalDevice, + uint32_t 
queueFamilyIndex, + struct _screen_window* window) const noexcept + { return fp_vkGetPhysicalDeviceScreenPresentationSupportQNX(physicalDevice, queueFamilyIndex, window); } #endif #if (defined(VK_EXT_debug_report)) - VkResult createDebugReportCallbackEXT(const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback) const noexcept { + VkResult createDebugReportCallbackEXT(const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDebugReportCallbackEXT* pCallback) const noexcept + { return fp_vkCreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback); } #endif #if (defined(VK_EXT_debug_report)) - void destroyDebugReportCallbackEXT(VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyDebugReportCallbackEXT(VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyDebugReportCallbackEXT(instance, callback, pAllocator); } #endif #if (defined(VK_EXT_debug_report)) - void debugReportMessageEXT(VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage) const noexcept { + void debugReportMessageEXT(VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char* pLayerPrefix, + const char* pMessage) const noexcept + { fp_vkDebugReportMessageEXT(instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage); } #endif #if (defined(VK_NV_external_memory_capabilities)) - VkResult getPhysicalDeviceExternalImageFormatPropertiesNV(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties) const noexcept { - return fp_vkGetPhysicalDeviceExternalImageFormatPropertiesNV(physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties); + VkResult getPhysicalDeviceExternalImageFormatPropertiesNV( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkExternalMemoryHandleTypeFlagsNV externalHandleType, + VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties) const noexcept + { + return fp_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties); } #endif #if (defined(VK_VERSION_1_1)) - void getPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2KHR* pFeatures) const noexcept { + void getPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2KHR* pFeatures) const noexcept + { fp_vkGetPhysicalDeviceFeatures2(physicalDevice, pFeatures); } #endif #if (defined(VK_VERSION_1_1)) - void getPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2KHR* pProperties) const noexcept { + void getPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2KHR* pProperties) const noexcept + { fp_vkGetPhysicalDeviceProperties2(physicalDevice, pProperties); } #endif #if (defined(VK_VERSION_1_1)) - void 
getPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2KHR* pFormatProperties) const noexcept { + void getPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2KHR* pFormatProperties) const noexcept + { fp_vkGetPhysicalDeviceFormatProperties2(physicalDevice, format, pFormatProperties); } #endif #if (defined(VK_VERSION_1_1)) - VkResult getPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2KHR* pImageFormatInfo, VkImageFormatProperties2KHR* pImageFormatProperties) const noexcept { + VkResult getPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2KHR* pImageFormatInfo, + VkImageFormatProperties2KHR* pImageFormatProperties) const noexcept + { return fp_vkGetPhysicalDeviceImageFormatProperties2(physicalDevice, pImageFormatInfo, pImageFormatProperties); } #endif #if (defined(VK_VERSION_1_1)) - void getPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR* pQueueFamilyProperties) const noexcept { + void getPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties2KHR* pQueueFamilyProperties) const noexcept + { fp_vkGetPhysicalDeviceQueueFamilyProperties2(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); } #endif #if (defined(VK_VERSION_1_1)) - void getPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2KHR* pMemoryProperties) const noexcept { + void getPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2KHR* pMemoryProperties) const noexcept + { fp_vkGetPhysicalDeviceMemoryProperties2(physicalDevice, pMemoryProperties); } #endif #if (defined(VK_VERSION_1_1)) - void getPhysicalDeviceSparseImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2KHR* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2KHR* pProperties) const noexcept { + void getPhysicalDeviceSparseImageFormatProperties2(VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2KHR* pFormatInfo, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties2KHR* pProperties) const noexcept + { fp_vkGetPhysicalDeviceSparseImageFormatProperties2(physicalDevice, pFormatInfo, pPropertyCount, pProperties); } #endif #if (defined(VK_VERSION_1_1)) - void getPhysicalDeviceExternalBufferProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfoKHR* pExternalBufferInfo, VkExternalBufferPropertiesKHR* pExternalBufferProperties) const noexcept { + void + getPhysicalDeviceExternalBufferProperties(VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfoKHR* pExternalBufferInfo, + VkExternalBufferPropertiesKHR* pExternalBufferProperties) const noexcept + { fp_vkGetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties); } #endif #if (defined(VK_NV_external_memory_sci_buf)) - VkResult getPhysicalDeviceExternalMemorySciBufPropertiesNV(VkPhysicalDevice physicalDevice, VkExternalMemoryHandleTypeFlagBitsKHR handleType, NvSciBufObj handle, VkMemorySciBufPropertiesNV* pMemorySciBufProperties) const noexcept { - return fp_vkGetPhysicalDeviceExternalMemorySciBufPropertiesNV(physicalDevice, handleType, handle, 
pMemorySciBufProperties); + VkResult getPhysicalDeviceExternalMemorySciBufPropertiesNV( + VkPhysicalDevice physicalDevice, + VkExternalMemoryHandleTypeFlagBitsKHR handleType, + NvSciBufObj handle, + VkMemorySciBufPropertiesNV* pMemorySciBufProperties) const noexcept + { + return fp_vkGetPhysicalDeviceExternalMemorySciBufPropertiesNV( + physicalDevice, handleType, handle, pMemorySciBufProperties); } #endif #if (defined(VK_NV_external_memory_sci_buf)) - VkResult getPhysicalDeviceSciBufAttributesNV(VkPhysicalDevice physicalDevice, NvSciBufAttrList pAttributes) const noexcept { + VkResult getPhysicalDeviceSciBufAttributesNV(VkPhysicalDevice physicalDevice, + NvSciBufAttrList pAttributes) const noexcept + { return fp_vkGetPhysicalDeviceSciBufAttributesNV(physicalDevice, pAttributes); } #endif #if (defined(VK_VERSION_1_1)) - void getPhysicalDeviceExternalSemaphoreProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfoKHR* pExternalSemaphoreInfo, VkExternalSemaphorePropertiesKHR* pExternalSemaphoreProperties) const noexcept { - fp_vkGetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties); + void getPhysicalDeviceExternalSemaphoreProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfoKHR* pExternalSemaphoreInfo, + VkExternalSemaphorePropertiesKHR* pExternalSemaphoreProperties) const noexcept + { + fp_vkGetPhysicalDeviceExternalSemaphoreProperties( + physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties); } #endif #if (defined(VK_VERSION_1_1)) - void getPhysicalDeviceExternalFenceProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfoKHR* pExternalFenceInfo, VkExternalFencePropertiesKHR* pExternalFenceProperties) const noexcept { + void getPhysicalDeviceExternalFenceProperties(VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfoKHR* pExternalFenceInfo, + VkExternalFencePropertiesKHR* pExternalFenceProperties) const noexcept + { fp_vkGetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties); } #endif #if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) - VkResult getPhysicalDeviceSciSyncAttributesNV(VkPhysicalDevice physicalDevice, const VkSciSyncAttributesInfoNV* pSciSyncAttributesInfo, NvSciSyncAttrList pAttributes) const noexcept { + VkResult getPhysicalDeviceSciSyncAttributesNV(VkPhysicalDevice physicalDevice, + const VkSciSyncAttributesInfoNV* pSciSyncAttributesInfo, + NvSciSyncAttrList pAttributes) const noexcept + { return fp_vkGetPhysicalDeviceSciSyncAttributesNV(physicalDevice, pSciSyncAttributesInfo, pAttributes); } #endif #if (defined(VK_EXT_direct_mode_display)) - VkResult releaseDisplayEXT(VkPhysicalDevice physicalDevice, VkDisplayKHR display) const noexcept { + VkResult releaseDisplayEXT(VkPhysicalDevice physicalDevice, VkDisplayKHR display) const noexcept + { return fp_vkReleaseDisplayEXT(physicalDevice, display); } #endif #if (defined(VK_EXT_acquire_xlib_display)) - VkResult acquireXlibDisplayEXT(VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display) const noexcept { + VkResult acquireXlibDisplayEXT(VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display) const noexcept + { return fp_vkAcquireXlibDisplayEXT(physicalDevice, dpy, display); } #endif #if (defined(VK_EXT_acquire_xlib_display)) - VkResult getRandROutputDisplayEXT(VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, 
VkDisplayKHR* pDisplay) const noexcept { + VkResult getRandROutputDisplayEXT(VkPhysicalDevice physicalDevice, + Display* dpy, + RROutput rrOutput, + VkDisplayKHR* pDisplay) const noexcept + { return fp_vkGetRandROutputDisplayEXT(physicalDevice, dpy, rrOutput, pDisplay); } #endif #if (defined(VK_NV_acquire_winrt_display)) - VkResult acquireWinrtDisplayNV(VkPhysicalDevice physicalDevice, VkDisplayKHR display) const noexcept { + VkResult acquireWinrtDisplayNV(VkPhysicalDevice physicalDevice, VkDisplayKHR display) const noexcept + { return fp_vkAcquireWinrtDisplayNV(physicalDevice, display); } #endif #if (defined(VK_NV_acquire_winrt_display)) - VkResult getWinrtDisplayNV(VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR* pDisplay) const noexcept { + VkResult + getWinrtDisplayNV(VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR* pDisplay) const noexcept + { return fp_vkGetWinrtDisplayNV(physicalDevice, deviceRelativeId, pDisplay); } #endif #if (defined(VK_EXT_display_surface_counter)) - VkResult getPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities) const noexcept { + VkResult getPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilities2EXT* pSurfaceCapabilities) const noexcept + { return fp_vkGetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities); } #endif #if (defined(VK_VERSION_1_1)) - VkResult enumeratePhysicalDeviceGroups(uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR* pPhysicalDeviceGroupProperties) const noexcept { + VkResult + enumeratePhysicalDeviceGroups(uint32_t* pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupPropertiesKHR* pPhysicalDeviceGroupProperties) const noexcept + { return fp_vkEnumeratePhysicalDeviceGroups(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties); } #endif #if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group)) - VkResult getPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects) const noexcept { + VkResult getPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pRectCount, + VkRect2D* pRects) const noexcept + { return fp_vkGetPhysicalDevicePresentRectanglesKHR(physicalDevice, surface, pRectCount, pRects); } #endif #if (defined(VK_MVK_ios_surface)) - VkResult createIOSSurfaceMVK(const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + VkResult createIOSSurfaceMVK(const VkIOSSurfaceCreateInfoMVK* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface) const noexcept + { return fp_vkCreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface); } #endif #if (defined(VK_MVK_macos_surface)) - VkResult createMacOSSurfaceMVK(const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + VkResult createMacOSSurfaceMVK(const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface) const noexcept + { return fp_vkCreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface); } #endif #if (defined(VK_EXT_metal_surface)) - VkResult createMetalSurfaceEXT(const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, 
VkSurfaceKHR* pSurface) const noexcept { + VkResult createMetalSurfaceEXT(const VkMetalSurfaceCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface) const noexcept + { return fp_vkCreateMetalSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface); } #endif #if (defined(VK_EXT_sample_locations)) - void getPhysicalDeviceMultisamplePropertiesEXT(VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties) const noexcept { + void getPhysicalDeviceMultisamplePropertiesEXT(VkPhysicalDevice physicalDevice, + VkSampleCountFlagBits samples, + VkMultisamplePropertiesEXT* pMultisampleProperties) const noexcept + { fp_vkGetPhysicalDeviceMultisamplePropertiesEXT(physicalDevice, samples, pMultisampleProperties); } #endif #if (defined(VK_KHR_get_surface_capabilities2)) - VkResult getPhysicalDeviceSurfaceCapabilities2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities) const noexcept { + VkResult getPhysicalDeviceSurfaceCapabilities2KHR(VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + VkSurfaceCapabilities2KHR* pSurfaceCapabilities) const noexcept + { return fp_vkGetPhysicalDeviceSurfaceCapabilities2KHR(physicalDevice, pSurfaceInfo, pSurfaceCapabilities); } #endif #if (defined(VK_KHR_get_surface_capabilities2)) - VkResult getPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats) const noexcept { - return fp_vkGetPhysicalDeviceSurfaceFormats2KHR(physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats); + VkResult getPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + uint32_t* pSurfaceFormatCount, + VkSurfaceFormat2KHR* pSurfaceFormats) const noexcept + { + return fp_vkGetPhysicalDeviceSurfaceFormats2KHR( + physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats); } #endif #if (defined(VK_KHR_get_display_properties2)) - VkResult getPhysicalDeviceDisplayProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties) const noexcept { + VkResult getPhysicalDeviceDisplayProperties2KHR(VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayProperties2KHR* pProperties) const noexcept + { return fp_vkGetPhysicalDeviceDisplayProperties2KHR(physicalDevice, pPropertyCount, pProperties); } #endif #if (defined(VK_KHR_get_display_properties2)) - VkResult getPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties) const noexcept { + VkResult getPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPlaneProperties2KHR* pProperties) const noexcept + { return fp_vkGetPhysicalDeviceDisplayPlaneProperties2KHR(physicalDevice, pPropertyCount, pProperties); } #endif #if (defined(VK_KHR_get_display_properties2)) - VkResult getDisplayModeProperties2KHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties) const noexcept { + VkResult getDisplayModeProperties2KHR(VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t* pPropertyCount, + VkDisplayModeProperties2KHR* pProperties) const noexcept + { return 
fp_vkGetDisplayModeProperties2KHR(physicalDevice, display, pPropertyCount, pProperties); } #endif #if (defined(VK_KHR_get_display_properties2)) - VkResult getDisplayPlaneCapabilities2KHR(VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities) const noexcept { + VkResult getDisplayPlaneCapabilities2KHR(VkPhysicalDevice physicalDevice, + const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, + VkDisplayPlaneCapabilities2KHR* pCapabilities) const noexcept + { return fp_vkGetDisplayPlaneCapabilities2KHR(physicalDevice, pDisplayPlaneInfo, pCapabilities); } #endif #if (defined(VK_KHR_calibrated_timestamps)) - VkResult getPhysicalDeviceCalibrateableTimeDomainsKHR(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains) const noexcept { + VkResult getPhysicalDeviceCalibrateableTimeDomainsKHR(VkPhysicalDevice physicalDevice, + uint32_t* pTimeDomainCount, + VkTimeDomainEXT* pTimeDomains) const noexcept + { return fp_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR(physicalDevice, pTimeDomainCount, pTimeDomains); } #endif #if (defined(VK_EXT_debug_utils)) - VkResult createDebugUtilsMessengerEXT(const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger) const noexcept { + VkResult createDebugUtilsMessengerEXT(const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDebugUtilsMessengerEXT* pMessenger) const noexcept + { return fp_vkCreateDebugUtilsMessengerEXT(instance, pCreateInfo, pAllocator, pMessenger); } #endif #if (defined(VK_EXT_debug_utils)) - void destroyDebugUtilsMessengerEXT(VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyDebugUtilsMessengerEXT(VkDebugUtilsMessengerEXT messenger, + const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyDebugUtilsMessengerEXT(instance, messenger, pAllocator); } #endif #if (defined(VK_EXT_debug_utils)) - void submitDebugUtilsMessageEXT(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData) const noexcept { + void submitDebugUtilsMessageEXT(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData) const noexcept + { fp_vkSubmitDebugUtilsMessageEXT(instance, messageSeverity, messageTypes, pCallbackData); } #endif #if (defined(VK_NV_cooperative_matrix)) - VkResult getPhysicalDeviceCooperativeMatrixPropertiesNV(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties) const noexcept { + VkResult getPhysicalDeviceCooperativeMatrixPropertiesNV(VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkCooperativeMatrixPropertiesNV* pProperties) const noexcept + { return fp_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(physicalDevice, pPropertyCount, pProperties); } #endif #if (defined(VK_EXT_full_screen_exclusive)) - VkResult getPhysicalDeviceSurfacePresentModes2EXT(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes) const noexcept { - return fp_vkGetPhysicalDeviceSurfacePresentModes2EXT(physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes); + VkResult 
getPhysicalDeviceSurfacePresentModes2EXT(VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + uint32_t* pPresentModeCount, + VkPresentModeKHR* pPresentModes) const noexcept + { + return fp_vkGetPhysicalDeviceSurfacePresentModes2EXT( + physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes); } #endif #if (defined(VK_KHR_performance_query)) - VkResult enumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions) const noexcept { - return fp_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions); + VkResult enumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + uint32_t* pCounterCount, + VkPerformanceCounterKHR* pCounters, + VkPerformanceCounterDescriptionKHR* pCounterDescriptions) const noexcept + { + return fp_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions); } #endif #if (defined(VK_KHR_performance_query)) - void getPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses) const noexcept { - fp_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physicalDevice, pPerformanceQueryCreateInfo, pNumPasses); + void getPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + VkPhysicalDevice physicalDevice, + const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, + uint32_t* pNumPasses) const noexcept + { + fp_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + physicalDevice, pPerformanceQueryCreateInfo, pNumPasses); } #endif #if (defined(VK_EXT_headless_surface)) - VkResult createHeadlessSurfaceEXT(const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const noexcept { + VkResult createHeadlessSurfaceEXT(const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface) const noexcept + { return fp_vkCreateHeadlessSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface); } #endif #if (defined(VK_NV_coverage_reduction_mode)) - VkResult getPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations) const noexcept { - return fp_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(physicalDevice, pCombinationCount, pCombinations); + VkResult getPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + VkPhysicalDevice physicalDevice, + uint32_t* pCombinationCount, + VkFramebufferMixedSamplesCombinationNV* pCombinations) const noexcept + { + return fp_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + physicalDevice, pCombinationCount, pCombinations); } #endif #if (defined(VK_VERSION_1_3)) - VkResult getPhysicalDeviceToolProperties(VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolPropertiesEXT* pToolProperties) const noexcept { + VkResult getPhysicalDeviceToolProperties(VkPhysicalDevice physicalDevice, + uint32_t* pToolCount, + VkPhysicalDeviceToolPropertiesEXT* pToolProperties) const 
noexcept + { return fp_vkGetPhysicalDeviceToolProperties(physicalDevice, pToolCount, pToolProperties); } #endif #if (defined(VK_KHR_object_refresh)) - VkResult getPhysicalDeviceRefreshableObjectTypesKHR(VkPhysicalDevice physicalDevice, uint32_t* pRefreshableObjectTypeCount, VkObjectType* pRefreshableObjectTypes) const noexcept { - return fp_vkGetPhysicalDeviceRefreshableObjectTypesKHR(physicalDevice, pRefreshableObjectTypeCount, pRefreshableObjectTypes); + VkResult getPhysicalDeviceRefreshableObjectTypesKHR(VkPhysicalDevice physicalDevice, + uint32_t* pRefreshableObjectTypeCount, + VkObjectType* pRefreshableObjectTypes) const noexcept + { + return fp_vkGetPhysicalDeviceRefreshableObjectTypesKHR( + physicalDevice, pRefreshableObjectTypeCount, pRefreshableObjectTypes); } #endif #if (defined(VK_KHR_fragment_shading_rate)) - VkResult getPhysicalDeviceFragmentShadingRatesKHR(VkPhysicalDevice physicalDevice, uint32_t* pFragmentShadingRateCount, VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates) const noexcept { - return fp_vkGetPhysicalDeviceFragmentShadingRatesKHR(physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates); + VkResult getPhysicalDeviceFragmentShadingRatesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pFragmentShadingRateCount, + VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates) const noexcept + { + return fp_vkGetPhysicalDeviceFragmentShadingRatesKHR( + physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates); } #endif #if (defined(VK_KHR_video_queue)) - VkResult getPhysicalDeviceVideoCapabilitiesKHR(VkPhysicalDevice physicalDevice, const VkVideoProfileInfoKHR* pVideoProfile, VkVideoCapabilitiesKHR* pCapabilities) const noexcept { + VkResult getPhysicalDeviceVideoCapabilitiesKHR(VkPhysicalDevice physicalDevice, + const VkVideoProfileInfoKHR* pVideoProfile, + VkVideoCapabilitiesKHR* pCapabilities) const noexcept + { return fp_vkGetPhysicalDeviceVideoCapabilitiesKHR(physicalDevice, pVideoProfile, pCapabilities); } #endif #if (defined(VK_KHR_video_queue)) - VkResult getPhysicalDeviceVideoFormatPropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo, uint32_t* pVideoFormatPropertyCount, VkVideoFormatPropertiesKHR* pVideoFormatProperties) const noexcept { - return fp_vkGetPhysicalDeviceVideoFormatPropertiesKHR(physicalDevice, pVideoFormatInfo, pVideoFormatPropertyCount, pVideoFormatProperties); + VkResult + getPhysicalDeviceVideoFormatPropertiesKHR(VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo, + uint32_t* pVideoFormatPropertyCount, + VkVideoFormatPropertiesKHR* pVideoFormatProperties) const noexcept + { + return fp_vkGetPhysicalDeviceVideoFormatPropertiesKHR( + physicalDevice, pVideoFormatInfo, pVideoFormatPropertyCount, pVideoFormatProperties); } #endif #if (defined(VK_KHR_video_encode_queue)) - VkResult getPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR* pQualityLevelInfo, VkVideoEncodeQualityLevelPropertiesKHR* pQualityLevelProperties) const noexcept { - return fp_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR(physicalDevice, pQualityLevelInfo, pQualityLevelProperties); + VkResult getPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR* pQualityLevelInfo, + VkVideoEncodeQualityLevelPropertiesKHR* pQualityLevelProperties) const noexcept + { + return 
fp_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( + physicalDevice, pQualityLevelInfo, pQualityLevelProperties); } #endif #if (defined(VK_EXT_acquire_drm_display)) - VkResult acquireDrmDisplayEXT(VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display) const noexcept { + VkResult acquireDrmDisplayEXT(VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display) const noexcept + { return fp_vkAcquireDrmDisplayEXT(physicalDevice, drmFd, display); } #endif #if (defined(VK_EXT_acquire_drm_display)) - VkResult getDrmDisplayEXT(VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR* display) const noexcept { + VkResult getDrmDisplayEXT(VkPhysicalDevice physicalDevice, + int32_t drmFd, + uint32_t connectorId, + VkDisplayKHR* display) const noexcept + { return fp_vkGetDrmDisplayEXT(physicalDevice, drmFd, connectorId, display); } #endif #if (defined(VK_NV_optical_flow)) - VkResult getPhysicalDeviceOpticalFlowImageFormatsNV(VkPhysicalDevice physicalDevice, const VkOpticalFlowImageFormatInfoNV* pOpticalFlowImageFormatInfo, uint32_t* pFormatCount, VkOpticalFlowImageFormatPropertiesNV* pImageFormatProperties) const noexcept { - return fp_vkGetPhysicalDeviceOpticalFlowImageFormatsNV(physicalDevice, pOpticalFlowImageFormatInfo, pFormatCount, pImageFormatProperties); + VkResult getPhysicalDeviceOpticalFlowImageFormatsNV( + VkPhysicalDevice physicalDevice, + const VkOpticalFlowImageFormatInfoNV* pOpticalFlowImageFormatInfo, + uint32_t* pFormatCount, + VkOpticalFlowImageFormatPropertiesNV* pImageFormatProperties) const noexcept + { + return fp_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( + physicalDevice, pOpticalFlowImageFormatInfo, pFormatCount, pImageFormatProperties); } #endif #if (defined(VK_KHR_cooperative_matrix)) - VkResult getPhysicalDeviceCooperativeMatrixPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesKHR* pProperties) const noexcept { + VkResult + getPhysicalDeviceCooperativeMatrixPropertiesKHR(VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkCooperativeMatrixPropertiesKHR* pProperties) const noexcept + { return fp_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR(physicalDevice, pPropertyCount, pProperties); } #endif #if (defined(VK_KHR_get_physical_device_properties2)) - void getPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2KHR* pFeatures) const noexcept { + void getPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2KHR* pFeatures) const noexcept + { fp_vkGetPhysicalDeviceFeatures2KHR(physicalDevice, pFeatures); } #endif #if (defined(VK_KHR_get_physical_device_properties2)) - void getPhysicalDeviceProperties2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2KHR* pProperties) const noexcept { + void getPhysicalDeviceProperties2KHR(VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2KHR* pProperties) const noexcept + { fp_vkGetPhysicalDeviceProperties2KHR(physicalDevice, pProperties); } #endif #if (defined(VK_KHR_get_physical_device_properties2)) - void getPhysicalDeviceFormatProperties2KHR(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2KHR* pFormatProperties) const noexcept { + void getPhysicalDeviceFormatProperties2KHR(VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2KHR* pFormatProperties) const noexcept + { fp_vkGetPhysicalDeviceFormatProperties2KHR(physicalDevice, format, pFormatProperties); } #endif #if 
(defined(VK_KHR_get_physical_device_properties2)) - VkResult getPhysicalDeviceImageFormatProperties2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2KHR* pImageFormatInfo, VkImageFormatProperties2KHR* pImageFormatProperties) const noexcept { - return fp_vkGetPhysicalDeviceImageFormatProperties2KHR(physicalDevice, pImageFormatInfo, pImageFormatProperties); + VkResult + getPhysicalDeviceImageFormatProperties2KHR(VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2KHR* pImageFormatInfo, + VkImageFormatProperties2KHR* pImageFormatProperties) const noexcept + { + return fp_vkGetPhysicalDeviceImageFormatProperties2KHR( + physicalDevice, pImageFormatInfo, pImageFormatProperties); } #endif #if (defined(VK_KHR_get_physical_device_properties2)) - void getPhysicalDeviceQueueFamilyProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR* pQueueFamilyProperties) const noexcept { - fp_vkGetPhysicalDeviceQueueFamilyProperties2KHR(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); + void getPhysicalDeviceQueueFamilyProperties2KHR(VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties2KHR* pQueueFamilyProperties) const noexcept + { + fp_vkGetPhysicalDeviceQueueFamilyProperties2KHR( + physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); } #endif #if (defined(VK_KHR_get_physical_device_properties2)) - void getPhysicalDeviceMemoryProperties2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2KHR* pMemoryProperties) const noexcept { + void getPhysicalDeviceMemoryProperties2KHR(VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2KHR* pMemoryProperties) const noexcept + { fp_vkGetPhysicalDeviceMemoryProperties2KHR(physicalDevice, pMemoryProperties); } #endif #if (defined(VK_KHR_get_physical_device_properties2)) - void getPhysicalDeviceSparseImageFormatProperties2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2KHR* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2KHR* pProperties) const noexcept { + void getPhysicalDeviceSparseImageFormatProperties2KHR(VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2KHR* pFormatInfo, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties2KHR* pProperties) const noexcept + { fp_vkGetPhysicalDeviceSparseImageFormatProperties2KHR(physicalDevice, pFormatInfo, pPropertyCount, pProperties); } #endif #if (defined(VK_KHR_external_memory_capabilities)) - void getPhysicalDeviceExternalBufferPropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfoKHR* pExternalBufferInfo, VkExternalBufferPropertiesKHR* pExternalBufferProperties) const noexcept { - fp_vkGetPhysicalDeviceExternalBufferPropertiesKHR(physicalDevice, pExternalBufferInfo, pExternalBufferProperties); + void getPhysicalDeviceExternalBufferPropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfoKHR* pExternalBufferInfo, + VkExternalBufferPropertiesKHR* pExternalBufferProperties) const noexcept + { + fp_vkGetPhysicalDeviceExternalBufferPropertiesKHR( + physicalDevice, pExternalBufferInfo, pExternalBufferProperties); } #endif #if (defined(VK_KHR_external_semaphore_capabilities)) - void getPhysicalDeviceExternalSemaphorePropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfoKHR* pExternalSemaphoreInfo, VkExternalSemaphorePropertiesKHR* 
pExternalSemaphoreProperties) const noexcept { - fp_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties); + void getPhysicalDeviceExternalSemaphorePropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfoKHR* pExternalSemaphoreInfo, + VkExternalSemaphorePropertiesKHR* pExternalSemaphoreProperties) const noexcept + { + fp_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties); } #endif #if (defined(VK_KHR_external_fence_capabilities)) - void getPhysicalDeviceExternalFencePropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfoKHR* pExternalFenceInfo, VkExternalFencePropertiesKHR* pExternalFenceProperties) const noexcept { + void + getPhysicalDeviceExternalFencePropertiesKHR(VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfoKHR* pExternalFenceInfo, + VkExternalFencePropertiesKHR* pExternalFenceProperties) const noexcept + { fp_vkGetPhysicalDeviceExternalFencePropertiesKHR(physicalDevice, pExternalFenceInfo, pExternalFenceProperties); } #endif #if (defined(VK_KHR_device_group_creation)) - VkResult enumeratePhysicalDeviceGroupsKHR(uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR* pPhysicalDeviceGroupProperties) const noexcept { - return fp_vkEnumeratePhysicalDeviceGroupsKHR(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties); + VkResult + enumeratePhysicalDeviceGroupsKHR(uint32_t* pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupPropertiesKHR* pPhysicalDeviceGroupProperties) const noexcept + { + return fp_vkEnumeratePhysicalDeviceGroupsKHR( + instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties); } #endif #if (defined(VK_EXT_calibrated_timestamps)) - VkResult getPhysicalDeviceCalibrateableTimeDomainsEXT(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains) const noexcept { + VkResult getPhysicalDeviceCalibrateableTimeDomainsEXT(VkPhysicalDevice physicalDevice, + uint32_t* pTimeDomainCount, + VkTimeDomainEXT* pTimeDomains) const noexcept + { return fp_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(physicalDevice, pTimeDomainCount, pTimeDomains); } #endif #if (defined(VK_EXT_tooling_info)) - VkResult getPhysicalDeviceToolPropertiesEXT(VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolPropertiesEXT* pToolProperties) const noexcept { + VkResult getPhysicalDeviceToolPropertiesEXT(VkPhysicalDevice physicalDevice, + uint32_t* pToolCount, + VkPhysicalDeviceToolPropertiesEXT* pToolProperties) const noexcept + { return fp_vkGetPhysicalDeviceToolPropertiesEXT(physicalDevice, pToolCount, pToolProperties); } #endif - PFN_vkDestroyInstance fp_vkDestroyInstance = nullptr; - PFN_vkEnumeratePhysicalDevices fp_vkEnumeratePhysicalDevices = nullptr; - PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr = nullptr; - PFN_vkGetPhysicalDeviceProperties fp_vkGetPhysicalDeviceProperties = nullptr; - PFN_vkGetPhysicalDeviceQueueFamilyProperties fp_vkGetPhysicalDeviceQueueFamilyProperties = nullptr; - PFN_vkGetPhysicalDeviceMemoryProperties fp_vkGetPhysicalDeviceMemoryProperties = nullptr; - PFN_vkGetPhysicalDeviceFeatures fp_vkGetPhysicalDeviceFeatures = nullptr; - PFN_vkGetPhysicalDeviceFormatProperties fp_vkGetPhysicalDeviceFormatProperties = nullptr; - PFN_vkGetPhysicalDeviceImageFormatProperties fp_vkGetPhysicalDeviceImageFormatProperties = nullptr; - 
PFN_vkEnumerateDeviceLayerProperties fp_vkEnumerateDeviceLayerProperties = nullptr; - PFN_vkEnumerateDeviceExtensionProperties fp_vkEnumerateDeviceExtensionProperties = nullptr; + PFN_vkDestroyInstance fp_vkDestroyInstance = nullptr; + PFN_vkEnumeratePhysicalDevices fp_vkEnumeratePhysicalDevices = nullptr; + PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr = nullptr; + PFN_vkGetPhysicalDeviceProperties fp_vkGetPhysicalDeviceProperties = nullptr; + PFN_vkGetPhysicalDeviceQueueFamilyProperties fp_vkGetPhysicalDeviceQueueFamilyProperties = nullptr; + PFN_vkGetPhysicalDeviceMemoryProperties fp_vkGetPhysicalDeviceMemoryProperties = nullptr; + PFN_vkGetPhysicalDeviceFeatures fp_vkGetPhysicalDeviceFeatures = nullptr; + PFN_vkGetPhysicalDeviceFormatProperties fp_vkGetPhysicalDeviceFormatProperties = nullptr; + PFN_vkGetPhysicalDeviceImageFormatProperties fp_vkGetPhysicalDeviceImageFormatProperties = nullptr; + PFN_vkEnumerateDeviceLayerProperties fp_vkEnumerateDeviceLayerProperties = nullptr; + PFN_vkEnumerateDeviceExtensionProperties fp_vkEnumerateDeviceExtensionProperties = nullptr; PFN_vkGetPhysicalDeviceSparseImageFormatProperties fp_vkGetPhysicalDeviceSparseImageFormatProperties = nullptr; #if (defined(VK_KHR_android_surface)) PFN_vkCreateAndroidSurfaceKHR fp_vkCreateAndroidSurfaceKHR = nullptr; #else - void * fp_vkCreateAndroidSurfaceKHR{}; + void* fp_vkCreateAndroidSurfaceKHR{}; #endif #if (defined(VK_KHR_display)) PFN_vkGetPhysicalDeviceDisplayPropertiesKHR fp_vkGetPhysicalDeviceDisplayPropertiesKHR = nullptr; #else - void * fp_vkGetPhysicalDeviceDisplayPropertiesKHR{}; + void* fp_vkGetPhysicalDeviceDisplayPropertiesKHR{}; #endif #if (defined(VK_KHR_display)) PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR fp_vkGetPhysicalDeviceDisplayPlanePropertiesKHR = nullptr; #else - void * fp_vkGetPhysicalDeviceDisplayPlanePropertiesKHR{}; + void* fp_vkGetPhysicalDeviceDisplayPlanePropertiesKHR{}; #endif #if (defined(VK_KHR_display)) PFN_vkGetDisplayPlaneSupportedDisplaysKHR fp_vkGetDisplayPlaneSupportedDisplaysKHR = nullptr; #else - void * fp_vkGetDisplayPlaneSupportedDisplaysKHR{}; + void* fp_vkGetDisplayPlaneSupportedDisplaysKHR{}; #endif #if (defined(VK_KHR_display)) PFN_vkGetDisplayModePropertiesKHR fp_vkGetDisplayModePropertiesKHR = nullptr; #else - void * fp_vkGetDisplayModePropertiesKHR{}; + void* fp_vkGetDisplayModePropertiesKHR{}; #endif #if (defined(VK_KHR_display)) PFN_vkCreateDisplayModeKHR fp_vkCreateDisplayModeKHR = nullptr; #else - void * fp_vkCreateDisplayModeKHR{}; + void* fp_vkCreateDisplayModeKHR{}; #endif #if (defined(VK_KHR_display)) PFN_vkGetDisplayPlaneCapabilitiesKHR fp_vkGetDisplayPlaneCapabilitiesKHR = nullptr; #else - void * fp_vkGetDisplayPlaneCapabilitiesKHR{}; + void* fp_vkGetDisplayPlaneCapabilitiesKHR{}; #endif #if (defined(VK_KHR_display)) PFN_vkCreateDisplayPlaneSurfaceKHR fp_vkCreateDisplayPlaneSurfaceKHR = nullptr; #else - void * fp_vkCreateDisplayPlaneSurfaceKHR{}; + void* fp_vkCreateDisplayPlaneSurfaceKHR{}; #endif #if (defined(VK_KHR_surface)) PFN_vkDestroySurfaceKHR fp_vkDestroySurfaceKHR = nullptr; #else - void * fp_vkDestroySurfaceKHR{}; + void* fp_vkDestroySurfaceKHR{}; #endif #if (defined(VK_KHR_surface)) PFN_vkGetPhysicalDeviceSurfaceSupportKHR fp_vkGetPhysicalDeviceSurfaceSupportKHR = nullptr; #else - void * fp_vkGetPhysicalDeviceSurfaceSupportKHR{}; + void* fp_vkGetPhysicalDeviceSurfaceSupportKHR{}; #endif #if (defined(VK_KHR_surface)) PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR = nullptr; #else - void * 
fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR{}; + void* fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR{}; #endif #if (defined(VK_KHR_surface)) PFN_vkGetPhysicalDeviceSurfaceFormatsKHR fp_vkGetPhysicalDeviceSurfaceFormatsKHR = nullptr; #else - void * fp_vkGetPhysicalDeviceSurfaceFormatsKHR{}; + void* fp_vkGetPhysicalDeviceSurfaceFormatsKHR{}; #endif #if (defined(VK_KHR_surface)) PFN_vkGetPhysicalDeviceSurfacePresentModesKHR fp_vkGetPhysicalDeviceSurfacePresentModesKHR = nullptr; #else - void * fp_vkGetPhysicalDeviceSurfacePresentModesKHR{}; + void* fp_vkGetPhysicalDeviceSurfacePresentModesKHR{}; #endif #if (defined(VK_NN_vi_surface)) PFN_vkCreateViSurfaceNN fp_vkCreateViSurfaceNN = nullptr; #else - void * fp_vkCreateViSurfaceNN{}; + void* fp_vkCreateViSurfaceNN{}; #endif #if (defined(VK_KHR_wayland_surface)) PFN_vkCreateWaylandSurfaceKHR fp_vkCreateWaylandSurfaceKHR = nullptr; #else - void * fp_vkCreateWaylandSurfaceKHR{}; + void* fp_vkCreateWaylandSurfaceKHR{}; #endif #if (defined(VK_KHR_wayland_surface)) PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR fp_vkGetPhysicalDeviceWaylandPresentationSupportKHR = nullptr; #else - void * fp_vkGetPhysicalDeviceWaylandPresentationSupportKHR{}; + void* fp_vkGetPhysicalDeviceWaylandPresentationSupportKHR{}; #endif #if (defined(VK_KHR_win32_surface)) PFN_vkCreateWin32SurfaceKHR fp_vkCreateWin32SurfaceKHR = nullptr; #else - void * fp_vkCreateWin32SurfaceKHR{}; + void* fp_vkCreateWin32SurfaceKHR{}; #endif #if (defined(VK_KHR_win32_surface)) PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR fp_vkGetPhysicalDeviceWin32PresentationSupportKHR = nullptr; #else - void * fp_vkGetPhysicalDeviceWin32PresentationSupportKHR{}; + void* fp_vkGetPhysicalDeviceWin32PresentationSupportKHR{}; #endif #if (defined(VK_KHR_xlib_surface)) PFN_vkCreateXlibSurfaceKHR fp_vkCreateXlibSurfaceKHR = nullptr; #else - void * fp_vkCreateXlibSurfaceKHR{}; + void* fp_vkCreateXlibSurfaceKHR{}; #endif #if (defined(VK_KHR_xlib_surface)) PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR fp_vkGetPhysicalDeviceXlibPresentationSupportKHR = nullptr; #else - void * fp_vkGetPhysicalDeviceXlibPresentationSupportKHR{}; + void* fp_vkGetPhysicalDeviceXlibPresentationSupportKHR{}; #endif #if (defined(VK_KHR_xcb_surface)) PFN_vkCreateXcbSurfaceKHR fp_vkCreateXcbSurfaceKHR = nullptr; #else - void * fp_vkCreateXcbSurfaceKHR{}; + void* fp_vkCreateXcbSurfaceKHR{}; #endif #if (defined(VK_KHR_xcb_surface)) PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR fp_vkGetPhysicalDeviceXcbPresentationSupportKHR = nullptr; #else - void * fp_vkGetPhysicalDeviceXcbPresentationSupportKHR{}; + void* fp_vkGetPhysicalDeviceXcbPresentationSupportKHR{}; #endif #if (defined(VK_EXT_directfb_surface)) PFN_vkCreateDirectFBSurfaceEXT fp_vkCreateDirectFBSurfaceEXT = nullptr; #else - void * fp_vkCreateDirectFBSurfaceEXT{}; + void* fp_vkCreateDirectFBSurfaceEXT{}; #endif #if (defined(VK_EXT_directfb_surface)) - PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT fp_vkGetPhysicalDeviceDirectFBPresentationSupportEXT = nullptr; + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT fp_vkGetPhysicalDeviceDirectFBPresentationSupportEXT = + nullptr; #else - void * fp_vkGetPhysicalDeviceDirectFBPresentationSupportEXT{}; + void* fp_vkGetPhysicalDeviceDirectFBPresentationSupportEXT{}; #endif #if (defined(VK_FUCHSIA_imagepipe_surface)) PFN_vkCreateImagePipeSurfaceFUCHSIA fp_vkCreateImagePipeSurfaceFUCHSIA = nullptr; #else - void * fp_vkCreateImagePipeSurfaceFUCHSIA{}; + void* fp_vkCreateImagePipeSurfaceFUCHSIA{}; #endif #if 
(defined(VK_GGP_stream_descriptor_surface)) PFN_vkCreateStreamDescriptorSurfaceGGP fp_vkCreateStreamDescriptorSurfaceGGP = nullptr; #else - void * fp_vkCreateStreamDescriptorSurfaceGGP{}; + void* fp_vkCreateStreamDescriptorSurfaceGGP{}; #endif #if (defined(VK_QNX_screen_surface)) PFN_vkCreateScreenSurfaceQNX fp_vkCreateScreenSurfaceQNX = nullptr; #else - void * fp_vkCreateScreenSurfaceQNX{}; + void* fp_vkCreateScreenSurfaceQNX{}; #endif #if (defined(VK_QNX_screen_surface)) PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX fp_vkGetPhysicalDeviceScreenPresentationSupportQNX = nullptr; #else - void * fp_vkGetPhysicalDeviceScreenPresentationSupportQNX{}; + void* fp_vkGetPhysicalDeviceScreenPresentationSupportQNX{}; #endif #if (defined(VK_EXT_debug_report)) PFN_vkCreateDebugReportCallbackEXT fp_vkCreateDebugReportCallbackEXT = nullptr; #else - void * fp_vkCreateDebugReportCallbackEXT{}; + void* fp_vkCreateDebugReportCallbackEXT{}; #endif #if (defined(VK_EXT_debug_report)) PFN_vkDestroyDebugReportCallbackEXT fp_vkDestroyDebugReportCallbackEXT = nullptr; #else - void * fp_vkDestroyDebugReportCallbackEXT{}; + void* fp_vkDestroyDebugReportCallbackEXT{}; #endif #if (defined(VK_EXT_debug_report)) PFN_vkDebugReportMessageEXT fp_vkDebugReportMessageEXT = nullptr; #else - void * fp_vkDebugReportMessageEXT{}; + void* fp_vkDebugReportMessageEXT{}; #endif #if (defined(VK_NV_external_memory_capabilities)) - PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV fp_vkGetPhysicalDeviceExternalImageFormatPropertiesNV = nullptr; + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV fp_vkGetPhysicalDeviceExternalImageFormatPropertiesNV = + nullptr; #else - void * fp_vkGetPhysicalDeviceExternalImageFormatPropertiesNV{}; + void* fp_vkGetPhysicalDeviceExternalImageFormatPropertiesNV{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkGetPhysicalDeviceFeatures2 fp_vkGetPhysicalDeviceFeatures2 = nullptr; #else - void * fp_vkGetPhysicalDeviceFeatures2{}; + void* fp_vkGetPhysicalDeviceFeatures2{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkGetPhysicalDeviceProperties2 fp_vkGetPhysicalDeviceProperties2 = nullptr; #else - void * fp_vkGetPhysicalDeviceProperties2{}; + void* fp_vkGetPhysicalDeviceProperties2{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkGetPhysicalDeviceFormatProperties2 fp_vkGetPhysicalDeviceFormatProperties2 = nullptr; #else - void * fp_vkGetPhysicalDeviceFormatProperties2{}; + void* fp_vkGetPhysicalDeviceFormatProperties2{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkGetPhysicalDeviceImageFormatProperties2 fp_vkGetPhysicalDeviceImageFormatProperties2 = nullptr; #else - void * fp_vkGetPhysicalDeviceImageFormatProperties2{}; + void* fp_vkGetPhysicalDeviceImageFormatProperties2{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkGetPhysicalDeviceQueueFamilyProperties2 fp_vkGetPhysicalDeviceQueueFamilyProperties2 = nullptr; #else - void * fp_vkGetPhysicalDeviceQueueFamilyProperties2{}; + void* fp_vkGetPhysicalDeviceQueueFamilyProperties2{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkGetPhysicalDeviceMemoryProperties2 fp_vkGetPhysicalDeviceMemoryProperties2 = nullptr; #else - void * fp_vkGetPhysicalDeviceMemoryProperties2{}; + void* fp_vkGetPhysicalDeviceMemoryProperties2{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 fp_vkGetPhysicalDeviceSparseImageFormatProperties2 = nullptr; #else - void * fp_vkGetPhysicalDeviceSparseImageFormatProperties2{}; + void* fp_vkGetPhysicalDeviceSparseImageFormatProperties2{}; #endif #if (defined(VK_VERSION_1_1)) 
PFN_vkGetPhysicalDeviceExternalBufferProperties fp_vkGetPhysicalDeviceExternalBufferProperties = nullptr; #else - void * fp_vkGetPhysicalDeviceExternalBufferProperties{}; + void* fp_vkGetPhysicalDeviceExternalBufferProperties{}; #endif #if (defined(VK_NV_external_memory_sci_buf)) - PFN_vkGetPhysicalDeviceExternalMemorySciBufPropertiesNV fp_vkGetPhysicalDeviceExternalMemorySciBufPropertiesNV = nullptr; + PFN_vkGetPhysicalDeviceExternalMemorySciBufPropertiesNV fp_vkGetPhysicalDeviceExternalMemorySciBufPropertiesNV = + nullptr; #else - void * fp_vkGetPhysicalDeviceExternalMemorySciBufPropertiesNV{}; + void* fp_vkGetPhysicalDeviceExternalMemorySciBufPropertiesNV{}; #endif #if (defined(VK_NV_external_memory_sci_buf)) PFN_vkGetPhysicalDeviceSciBufAttributesNV fp_vkGetPhysicalDeviceSciBufAttributesNV = nullptr; #else - void * fp_vkGetPhysicalDeviceSciBufAttributesNV{}; + void* fp_vkGetPhysicalDeviceSciBufAttributesNV{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkGetPhysicalDeviceExternalSemaphoreProperties fp_vkGetPhysicalDeviceExternalSemaphoreProperties = nullptr; #else - void * fp_vkGetPhysicalDeviceExternalSemaphoreProperties{}; + void* fp_vkGetPhysicalDeviceExternalSemaphoreProperties{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkGetPhysicalDeviceExternalFenceProperties fp_vkGetPhysicalDeviceExternalFenceProperties = nullptr; #else - void * fp_vkGetPhysicalDeviceExternalFenceProperties{}; + void* fp_vkGetPhysicalDeviceExternalFenceProperties{}; #endif #if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) PFN_vkGetPhysicalDeviceSciSyncAttributesNV fp_vkGetPhysicalDeviceSciSyncAttributesNV = nullptr; #else - void * fp_vkGetPhysicalDeviceSciSyncAttributesNV{}; + void* fp_vkGetPhysicalDeviceSciSyncAttributesNV{}; #endif #if (defined(VK_EXT_direct_mode_display)) PFN_vkReleaseDisplayEXT fp_vkReleaseDisplayEXT = nullptr; #else - void * fp_vkReleaseDisplayEXT{}; + void* fp_vkReleaseDisplayEXT{}; #endif #if (defined(VK_EXT_acquire_xlib_display)) PFN_vkAcquireXlibDisplayEXT fp_vkAcquireXlibDisplayEXT = nullptr; #else - void * fp_vkAcquireXlibDisplayEXT{}; + void* fp_vkAcquireXlibDisplayEXT{}; #endif #if (defined(VK_EXT_acquire_xlib_display)) PFN_vkGetRandROutputDisplayEXT fp_vkGetRandROutputDisplayEXT = nullptr; #else - void * fp_vkGetRandROutputDisplayEXT{}; + void* fp_vkGetRandROutputDisplayEXT{}; #endif #if (defined(VK_NV_acquire_winrt_display)) PFN_vkAcquireWinrtDisplayNV fp_vkAcquireWinrtDisplayNV = nullptr; #else - void * fp_vkAcquireWinrtDisplayNV{}; + void* fp_vkAcquireWinrtDisplayNV{}; #endif #if (defined(VK_NV_acquire_winrt_display)) PFN_vkGetWinrtDisplayNV fp_vkGetWinrtDisplayNV = nullptr; #else - void * fp_vkGetWinrtDisplayNV{}; + void* fp_vkGetWinrtDisplayNV{}; #endif #if (defined(VK_EXT_display_surface_counter)) PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT fp_vkGetPhysicalDeviceSurfaceCapabilities2EXT = nullptr; #else - void * fp_vkGetPhysicalDeviceSurfaceCapabilities2EXT{}; + void* fp_vkGetPhysicalDeviceSurfaceCapabilities2EXT{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkEnumeratePhysicalDeviceGroups fp_vkEnumeratePhysicalDeviceGroups = nullptr; #else - void * fp_vkEnumeratePhysicalDeviceGroups{}; + void* fp_vkEnumeratePhysicalDeviceGroups{}; #endif #if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group)) PFN_vkGetPhysicalDevicePresentRectanglesKHR fp_vkGetPhysicalDevicePresentRectanglesKHR = nullptr; #else - void * fp_vkGetPhysicalDevicePresentRectanglesKHR{}; + void* fp_vkGetPhysicalDevicePresentRectanglesKHR{}; #endif #if 
(defined(VK_MVK_ios_surface)) PFN_vkCreateIOSSurfaceMVK fp_vkCreateIOSSurfaceMVK = nullptr; #else - void * fp_vkCreateIOSSurfaceMVK{}; + void* fp_vkCreateIOSSurfaceMVK{}; #endif #if (defined(VK_MVK_macos_surface)) PFN_vkCreateMacOSSurfaceMVK fp_vkCreateMacOSSurfaceMVK = nullptr; #else - void * fp_vkCreateMacOSSurfaceMVK{}; + void* fp_vkCreateMacOSSurfaceMVK{}; #endif #if (defined(VK_EXT_metal_surface)) PFN_vkCreateMetalSurfaceEXT fp_vkCreateMetalSurfaceEXT = nullptr; #else - void * fp_vkCreateMetalSurfaceEXT{}; + void* fp_vkCreateMetalSurfaceEXT{}; #endif #if (defined(VK_EXT_sample_locations)) PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT fp_vkGetPhysicalDeviceMultisamplePropertiesEXT = nullptr; #else - void * fp_vkGetPhysicalDeviceMultisamplePropertiesEXT{}; + void* fp_vkGetPhysicalDeviceMultisamplePropertiesEXT{}; #endif #if (defined(VK_KHR_get_surface_capabilities2)) PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR fp_vkGetPhysicalDeviceSurfaceCapabilities2KHR = nullptr; #else - void * fp_vkGetPhysicalDeviceSurfaceCapabilities2KHR{}; + void* fp_vkGetPhysicalDeviceSurfaceCapabilities2KHR{}; #endif #if (defined(VK_KHR_get_surface_capabilities2)) PFN_vkGetPhysicalDeviceSurfaceFormats2KHR fp_vkGetPhysicalDeviceSurfaceFormats2KHR = nullptr; #else - void * fp_vkGetPhysicalDeviceSurfaceFormats2KHR{}; + void* fp_vkGetPhysicalDeviceSurfaceFormats2KHR{}; #endif #if (defined(VK_KHR_get_display_properties2)) PFN_vkGetPhysicalDeviceDisplayProperties2KHR fp_vkGetPhysicalDeviceDisplayProperties2KHR = nullptr; #else - void * fp_vkGetPhysicalDeviceDisplayProperties2KHR{}; + void* fp_vkGetPhysicalDeviceDisplayProperties2KHR{}; #endif #if (defined(VK_KHR_get_display_properties2)) PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR fp_vkGetPhysicalDeviceDisplayPlaneProperties2KHR = nullptr; #else - void * fp_vkGetPhysicalDeviceDisplayPlaneProperties2KHR{}; + void* fp_vkGetPhysicalDeviceDisplayPlaneProperties2KHR{}; #endif #if (defined(VK_KHR_get_display_properties2)) PFN_vkGetDisplayModeProperties2KHR fp_vkGetDisplayModeProperties2KHR = nullptr; #else - void * fp_vkGetDisplayModeProperties2KHR{}; + void* fp_vkGetDisplayModeProperties2KHR{}; #endif #if (defined(VK_KHR_get_display_properties2)) PFN_vkGetDisplayPlaneCapabilities2KHR fp_vkGetDisplayPlaneCapabilities2KHR = nullptr; #else - void * fp_vkGetDisplayPlaneCapabilities2KHR{}; + void* fp_vkGetDisplayPlaneCapabilities2KHR{}; #endif #if (defined(VK_KHR_calibrated_timestamps)) PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR fp_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = nullptr; #else - void * fp_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR{}; + void* fp_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR{}; #endif #if (defined(VK_EXT_debug_utils)) PFN_vkCreateDebugUtilsMessengerEXT fp_vkCreateDebugUtilsMessengerEXT = nullptr; #else - void * fp_vkCreateDebugUtilsMessengerEXT{}; + void* fp_vkCreateDebugUtilsMessengerEXT{}; #endif #if (defined(VK_EXT_debug_utils)) PFN_vkDestroyDebugUtilsMessengerEXT fp_vkDestroyDebugUtilsMessengerEXT = nullptr; #else - void * fp_vkDestroyDebugUtilsMessengerEXT{}; + void* fp_vkDestroyDebugUtilsMessengerEXT{}; #endif #if (defined(VK_EXT_debug_utils)) PFN_vkSubmitDebugUtilsMessageEXT fp_vkSubmitDebugUtilsMessageEXT = nullptr; #else - void * fp_vkSubmitDebugUtilsMessageEXT{}; + void* fp_vkSubmitDebugUtilsMessageEXT{}; #endif #if (defined(VK_NV_cooperative_matrix)) PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV fp_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = nullptr; #else - void * 
fp_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV{}; + void* fp_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV{}; #endif #if (defined(VK_EXT_full_screen_exclusive)) PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT fp_vkGetPhysicalDeviceSurfacePresentModes2EXT = nullptr; #else - void * fp_vkGetPhysicalDeviceSurfacePresentModes2EXT{}; + void* fp_vkGetPhysicalDeviceSurfacePresentModes2EXT{}; #endif #if (defined(VK_KHR_performance_query)) - PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR fp_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = nullptr; + PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR + fp_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = nullptr; #else - void * fp_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR{}; + void* fp_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR{}; #endif #if (defined(VK_KHR_performance_query)) - PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR fp_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = nullptr; + PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR + fp_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = nullptr; #else - void * fp_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR{}; + void* fp_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR{}; #endif #if (defined(VK_EXT_headless_surface)) PFN_vkCreateHeadlessSurfaceEXT fp_vkCreateHeadlessSurfaceEXT = nullptr; #else - void * fp_vkCreateHeadlessSurfaceEXT{}; + void* fp_vkCreateHeadlessSurfaceEXT{}; #endif #if (defined(VK_NV_coverage_reduction_mode)) - PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV fp_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = nullptr; + PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV + fp_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = nullptr; #else - void * fp_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV{}; + void* fp_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkGetPhysicalDeviceToolProperties fp_vkGetPhysicalDeviceToolProperties = nullptr; #else - void * fp_vkGetPhysicalDeviceToolProperties{}; + void* fp_vkGetPhysicalDeviceToolProperties{}; #endif #if (defined(VK_KHR_object_refresh)) PFN_vkGetPhysicalDeviceRefreshableObjectTypesKHR fp_vkGetPhysicalDeviceRefreshableObjectTypesKHR = nullptr; #else - void * fp_vkGetPhysicalDeviceRefreshableObjectTypesKHR{}; + void* fp_vkGetPhysicalDeviceRefreshableObjectTypesKHR{}; #endif #if (defined(VK_KHR_fragment_shading_rate)) PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR fp_vkGetPhysicalDeviceFragmentShadingRatesKHR = nullptr; #else - void * fp_vkGetPhysicalDeviceFragmentShadingRatesKHR{}; + void* fp_vkGetPhysicalDeviceFragmentShadingRatesKHR{}; #endif #if (defined(VK_KHR_video_queue)) PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR fp_vkGetPhysicalDeviceVideoCapabilitiesKHR = nullptr; #else - void * fp_vkGetPhysicalDeviceVideoCapabilitiesKHR{}; + void* fp_vkGetPhysicalDeviceVideoCapabilitiesKHR{}; #endif #if (defined(VK_KHR_video_queue)) PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR fp_vkGetPhysicalDeviceVideoFormatPropertiesKHR = nullptr; #else - void * fp_vkGetPhysicalDeviceVideoFormatPropertiesKHR{}; + void* fp_vkGetPhysicalDeviceVideoFormatPropertiesKHR{}; #endif #if (defined(VK_KHR_video_encode_queue)) - PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR 
fp_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = nullptr; + PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR + fp_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = nullptr; #else - void * fp_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR{}; + void* fp_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR{}; #endif #if (defined(VK_EXT_acquire_drm_display)) PFN_vkAcquireDrmDisplayEXT fp_vkAcquireDrmDisplayEXT = nullptr; #else - void * fp_vkAcquireDrmDisplayEXT{}; + void* fp_vkAcquireDrmDisplayEXT{}; #endif #if (defined(VK_EXT_acquire_drm_display)) PFN_vkGetDrmDisplayEXT fp_vkGetDrmDisplayEXT = nullptr; #else - void * fp_vkGetDrmDisplayEXT{}; + void* fp_vkGetDrmDisplayEXT{}; #endif #if (defined(VK_NV_optical_flow)) PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV fp_vkGetPhysicalDeviceOpticalFlowImageFormatsNV = nullptr; #else - void * fp_vkGetPhysicalDeviceOpticalFlowImageFormatsNV{}; + void* fp_vkGetPhysicalDeviceOpticalFlowImageFormatsNV{}; #endif #if (defined(VK_KHR_cooperative_matrix)) - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR fp_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = nullptr; + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR fp_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = + nullptr; #else - void * fp_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR{}; + void* fp_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR{}; #endif #if (defined(VK_KHR_get_physical_device_properties2)) PFN_vkGetPhysicalDeviceFeatures2KHR fp_vkGetPhysicalDeviceFeatures2KHR = nullptr; #else - void * fp_vkGetPhysicalDeviceFeatures2KHR{}; + void* fp_vkGetPhysicalDeviceFeatures2KHR{}; #endif #if (defined(VK_KHR_get_physical_device_properties2)) PFN_vkGetPhysicalDeviceProperties2KHR fp_vkGetPhysicalDeviceProperties2KHR = nullptr; #else - void * fp_vkGetPhysicalDeviceProperties2KHR{}; + void* fp_vkGetPhysicalDeviceProperties2KHR{}; #endif #if (defined(VK_KHR_get_physical_device_properties2)) PFN_vkGetPhysicalDeviceFormatProperties2KHR fp_vkGetPhysicalDeviceFormatProperties2KHR = nullptr; #else - void * fp_vkGetPhysicalDeviceFormatProperties2KHR{}; + void* fp_vkGetPhysicalDeviceFormatProperties2KHR{}; #endif #if (defined(VK_KHR_get_physical_device_properties2)) PFN_vkGetPhysicalDeviceImageFormatProperties2KHR fp_vkGetPhysicalDeviceImageFormatProperties2KHR = nullptr; #else - void * fp_vkGetPhysicalDeviceImageFormatProperties2KHR{}; + void* fp_vkGetPhysicalDeviceImageFormatProperties2KHR{}; #endif #if (defined(VK_KHR_get_physical_device_properties2)) PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR fp_vkGetPhysicalDeviceQueueFamilyProperties2KHR = nullptr; #else - void * fp_vkGetPhysicalDeviceQueueFamilyProperties2KHR{}; + void* fp_vkGetPhysicalDeviceQueueFamilyProperties2KHR{}; #endif #if (defined(VK_KHR_get_physical_device_properties2)) PFN_vkGetPhysicalDeviceMemoryProperties2KHR fp_vkGetPhysicalDeviceMemoryProperties2KHR = nullptr; #else - void * fp_vkGetPhysicalDeviceMemoryProperties2KHR{}; + void* fp_vkGetPhysicalDeviceMemoryProperties2KHR{}; #endif #if (defined(VK_KHR_get_physical_device_properties2)) - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR fp_vkGetPhysicalDeviceSparseImageFormatProperties2KHR = nullptr; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR fp_vkGetPhysicalDeviceSparseImageFormatProperties2KHR = + nullptr; #else - void * fp_vkGetPhysicalDeviceSparseImageFormatProperties2KHR{}; + void* fp_vkGetPhysicalDeviceSparseImageFormatProperties2KHR{}; #endif #if (defined(VK_KHR_external_memory_capabilities)) 
 PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR fp_vkGetPhysicalDeviceExternalBufferPropertiesKHR = nullptr;
 #else
-    void * fp_vkGetPhysicalDeviceExternalBufferPropertiesKHR{};
+    void* fp_vkGetPhysicalDeviceExternalBufferPropertiesKHR{};
 #endif
 #if (defined(VK_KHR_external_semaphore_capabilities))
-    PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR fp_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = nullptr;
+    PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR fp_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR =
+        nullptr;
 #else
-    void * fp_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR{};
+    void* fp_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR{};
 #endif
 #if (defined(VK_KHR_external_fence_capabilities))
 PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR fp_vkGetPhysicalDeviceExternalFencePropertiesKHR = nullptr;
 #else
-    void * fp_vkGetPhysicalDeviceExternalFencePropertiesKHR{};
+    void* fp_vkGetPhysicalDeviceExternalFencePropertiesKHR{};
 #endif
 #if (defined(VK_KHR_device_group_creation))
 PFN_vkEnumeratePhysicalDeviceGroupsKHR fp_vkEnumeratePhysicalDeviceGroupsKHR = nullptr;
 #else
-    void * fp_vkEnumeratePhysicalDeviceGroupsKHR{};
+    void* fp_vkEnumeratePhysicalDeviceGroupsKHR{};
 #endif
 #if (defined(VK_EXT_calibrated_timestamps))
 PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT fp_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = nullptr;
 #else
-    void * fp_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT{};
+    void* fp_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT{};
 #endif
 #if (defined(VK_EXT_tooling_info))
 PFN_vkGetPhysicalDeviceToolPropertiesEXT fp_vkGetPhysicalDeviceToolPropertiesEXT = nullptr;
 #else
-    void * fp_vkGetPhysicalDeviceToolPropertiesEXT{};
+    void* fp_vkGetPhysicalDeviceToolPropertiesEXT{};
 #endif
-    bool is_populated() const { return populated; }
+    bool is_populated() const { return populated; }
 VkInstance instance = VK_NULL_HANDLE;
-private:
-    bool populated = false;
+
+  private:
+    bool populated = false;
 };
-struct DispatchTable {
+struct DispatchTable
+{
     DispatchTable() = default;
-    DispatchTable(VkDevice device, PFN_vkGetDeviceProcAddr procAddr) : device(device), populated(true) {
+    DispatchTable(VkDevice device, PFN_vkGetDeviceProcAddr procAddr) : device(device), populated(true)
+    {
         fp_vkGetDeviceProcAddr = procAddr;
-        fp_vkGetDeviceQueue = reinterpret_cast<PFN_vkGetDeviceQueue>(procAddr(device, "vkGetDeviceQueue"));
-        fp_vkQueueSubmit = reinterpret_cast<PFN_vkQueueSubmit>(procAddr(device, "vkQueueSubmit"));
-        fp_vkQueueWaitIdle = reinterpret_cast<PFN_vkQueueWaitIdle>(procAddr(device, "vkQueueWaitIdle"));
-        fp_vkDeviceWaitIdle = reinterpret_cast<PFN_vkDeviceWaitIdle>(procAddr(device, "vkDeviceWaitIdle"));
-        fp_vkAllocateMemory = reinterpret_cast<PFN_vkAllocateMemory>(procAddr(device, "vkAllocateMemory"));
-        fp_vkFreeMemory = reinterpret_cast<PFN_vkFreeMemory>(procAddr(device, "vkFreeMemory"));
-        fp_vkMapMemory = reinterpret_cast<PFN_vkMapMemory>(procAddr(device, "vkMapMemory"));
-        fp_vkUnmapMemory = reinterpret_cast<PFN_vkUnmapMemory>(procAddr(device, "vkUnmapMemory"));
-        fp_vkFlushMappedMemoryRanges = reinterpret_cast<PFN_vkFlushMappedMemoryRanges>(procAddr(device, "vkFlushMappedMemoryRanges"));
-        fp_vkInvalidateMappedMemoryRanges = reinterpret_cast<PFN_vkInvalidateMappedMemoryRanges>(procAddr(device, "vkInvalidateMappedMemoryRanges"));
-        fp_vkGetDeviceMemoryCommitment = reinterpret_cast<PFN_vkGetDeviceMemoryCommitment>(procAddr(device, "vkGetDeviceMemoryCommitment"));
-        fp_vkGetBufferMemoryRequirements = reinterpret_cast<PFN_vkGetBufferMemoryRequirements>(procAddr(device, "vkGetBufferMemoryRequirements"));
+        fp_vkGetDeviceQueue = reinterpret_cast<PFN_vkGetDeviceQueue>(procAddr(device, "vkGetDeviceQueue"));
+        fp_vkQueueSubmit = reinterpret_cast<PFN_vkQueueSubmit>(procAddr(device, "vkQueueSubmit"));
+        fp_vkQueueWaitIdle = reinterpret_cast<PFN_vkQueueWaitIdle>(procAddr(device, "vkQueueWaitIdle"));
+        fp_vkDeviceWaitIdle = reinterpret_cast<PFN_vkDeviceWaitIdle>(procAddr(device, "vkDeviceWaitIdle"));
+        fp_vkAllocateMemory = reinterpret_cast<PFN_vkAllocateMemory>(procAddr(device, "vkAllocateMemory"));
+        fp_vkFreeMemory = reinterpret_cast<PFN_vkFreeMemory>(procAddr(device, "vkFreeMemory"));
+        fp_vkMapMemory = reinterpret_cast<PFN_vkMapMemory>(procAddr(device, "vkMapMemory"));
+        fp_vkUnmapMemory = reinterpret_cast<PFN_vkUnmapMemory>(procAddr(device, "vkUnmapMemory"));
+        fp_vkFlushMappedMemoryRanges =
+            reinterpret_cast<PFN_vkFlushMappedMemoryRanges>(procAddr(device, "vkFlushMappedMemoryRanges"));
+        fp_vkInvalidateMappedMemoryRanges =
+            reinterpret_cast<PFN_vkInvalidateMappedMemoryRanges>(procAddr(device, "vkInvalidateMappedMemoryRanges"));
+        fp_vkGetDeviceMemoryCommitment =
+            reinterpret_cast<PFN_vkGetDeviceMemoryCommitment>(procAddr(device, "vkGetDeviceMemoryCommitment"));
+        fp_vkGetBufferMemoryRequirements =
+            reinterpret_cast<PFN_vkGetBufferMemoryRequirements>(procAddr(device, "vkGetBufferMemoryRequirements"));
         fp_vkBindBufferMemory = reinterpret_cast<PFN_vkBindBufferMemory>(procAddr(device, "vkBindBufferMemory"));
-        fp_vkGetImageMemoryRequirements = reinterpret_cast<PFN_vkGetImageMemoryRequirements>(procAddr(device, "vkGetImageMemoryRequirements"));
+        fp_vkGetImageMemoryRequirements =
+            reinterpret_cast<PFN_vkGetImageMemoryRequirements>(procAddr(device, "vkGetImageMemoryRequirements"));
         fp_vkBindImageMemory = reinterpret_cast<PFN_vkBindImageMemory>(procAddr(device, "vkBindImageMemory"));
-        fp_vkGetImageSparseMemoryRequirements = reinterpret_cast<PFN_vkGetImageSparseMemoryRequirements>(procAddr(device, "vkGetImageSparseMemoryRequirements"));
-        fp_vkQueueBindSparse = reinterpret_cast<PFN_vkQueueBindSparse>(procAddr(device, "vkQueueBindSparse"));
-        fp_vkCreateFence = reinterpret_cast<PFN_vkCreateFence>(procAddr(device, "vkCreateFence"));
-        fp_vkDestroyFence = reinterpret_cast<PFN_vkDestroyFence>(procAddr(device, "vkDestroyFence"));
-        fp_vkResetFences = reinterpret_cast<PFN_vkResetFences>(procAddr(device, "vkResetFences"));
-        fp_vkGetFenceStatus = reinterpret_cast<PFN_vkGetFenceStatus>(procAddr(device, "vkGetFenceStatus"));
-        fp_vkWaitForFences = reinterpret_cast<PFN_vkWaitForFences>(procAddr(device, "vkWaitForFences"));
-        fp_vkCreateSemaphore = reinterpret_cast<PFN_vkCreateSemaphore>(procAddr(device, "vkCreateSemaphore"));
+        fp_vkGetImageSparseMemoryRequirements = reinterpret_cast<PFN_vkGetImageSparseMemoryRequirements>(
+            procAddr(device, "vkGetImageSparseMemoryRequirements"));
+        fp_vkQueueBindSparse = reinterpret_cast<PFN_vkQueueBindSparse>(procAddr(device, "vkQueueBindSparse"));
+        fp_vkCreateFence = reinterpret_cast<PFN_vkCreateFence>(procAddr(device, "vkCreateFence"));
+        fp_vkDestroyFence = reinterpret_cast<PFN_vkDestroyFence>(procAddr(device, "vkDestroyFence"));
+        fp_vkResetFences = reinterpret_cast<PFN_vkResetFences>(procAddr(device, "vkResetFences"));
+        fp_vkGetFenceStatus = reinterpret_cast<PFN_vkGetFenceStatus>(procAddr(device, "vkGetFenceStatus"));
+        fp_vkWaitForFences = reinterpret_cast<PFN_vkWaitForFences>(procAddr(device, "vkWaitForFences"));
+        fp_vkCreateSemaphore = reinterpret_cast<PFN_vkCreateSemaphore>(procAddr(device, "vkCreateSemaphore"));
         fp_vkDestroySemaphore = reinterpret_cast<PFN_vkDestroySemaphore>(procAddr(device, "vkDestroySemaphore"));
-        fp_vkCreateEvent = reinterpret_cast<PFN_vkCreateEvent>(procAddr(device, "vkCreateEvent"));
-        fp_vkDestroyEvent = reinterpret_cast<PFN_vkDestroyEvent>(procAddr(device, "vkDestroyEvent"));
-        fp_vkGetEventStatus = reinterpret_cast<PFN_vkGetEventStatus>(procAddr(device, "vkGetEventStatus"));
-        fp_vkSetEvent = reinterpret_cast<PFN_vkSetEvent>(procAddr(device, "vkSetEvent"));
-        fp_vkResetEvent = reinterpret_cast<PFN_vkResetEvent>(procAddr(device, "vkResetEvent"));
-        fp_vkCreateQueryPool = reinterpret_cast<PFN_vkCreateQueryPool>(procAddr(device, "vkCreateQueryPool"));
+        fp_vkCreateEvent = reinterpret_cast<PFN_vkCreateEvent>(procAddr(device, "vkCreateEvent"));
+        fp_vkDestroyEvent = reinterpret_cast<PFN_vkDestroyEvent>(procAddr(device, "vkDestroyEvent"));
+        fp_vkGetEventStatus = reinterpret_cast<PFN_vkGetEventStatus>(procAddr(device, "vkGetEventStatus"));
+        fp_vkSetEvent = reinterpret_cast<PFN_vkSetEvent>(procAddr(device, "vkSetEvent"));
+        fp_vkResetEvent = reinterpret_cast<PFN_vkResetEvent>(procAddr(device, "vkResetEvent"));
+        fp_vkCreateQueryPool = reinterpret_cast<PFN_vkCreateQueryPool>(procAddr(device, "vkCreateQueryPool"));
         fp_vkDestroyQueryPool = reinterpret_cast<PFN_vkDestroyQueryPool>(procAddr(device, 
"vkDestroyQueryPool")); - fp_vkGetQueryPoolResults = reinterpret_cast(procAddr(device, "vkGetQueryPoolResults")); + fp_vkGetQueryPoolResults = + reinterpret_cast(procAddr(device, "vkGetQueryPoolResults")); #if (defined(VK_VERSION_1_2)) fp_vkResetQueryPool = reinterpret_cast(procAddr(device, "vkResetQueryPool")); #endif - fp_vkCreateBuffer = reinterpret_cast(procAddr(device, "vkCreateBuffer")); - fp_vkDestroyBuffer = reinterpret_cast(procAddr(device, "vkDestroyBuffer")); - fp_vkCreateBufferView = reinterpret_cast(procAddr(device, "vkCreateBufferView")); + fp_vkCreateBuffer = reinterpret_cast(procAddr(device, "vkCreateBuffer")); + fp_vkDestroyBuffer = reinterpret_cast(procAddr(device, "vkDestroyBuffer")); + fp_vkCreateBufferView = reinterpret_cast(procAddr(device, "vkCreateBufferView")); fp_vkDestroyBufferView = reinterpret_cast(procAddr(device, "vkDestroyBufferView")); - fp_vkCreateImage = reinterpret_cast(procAddr(device, "vkCreateImage")); - fp_vkDestroyImage = reinterpret_cast(procAddr(device, "vkDestroyImage")); - fp_vkGetImageSubresourceLayout = reinterpret_cast(procAddr(device, "vkGetImageSubresourceLayout")); - fp_vkCreateImageView = reinterpret_cast(procAddr(device, "vkCreateImageView")); - fp_vkDestroyImageView = reinterpret_cast(procAddr(device, "vkDestroyImageView")); + fp_vkCreateImage = reinterpret_cast(procAddr(device, "vkCreateImage")); + fp_vkDestroyImage = reinterpret_cast(procAddr(device, "vkDestroyImage")); + fp_vkGetImageSubresourceLayout = + reinterpret_cast(procAddr(device, "vkGetImageSubresourceLayout")); + fp_vkCreateImageView = reinterpret_cast(procAddr(device, "vkCreateImageView")); + fp_vkDestroyImageView = reinterpret_cast(procAddr(device, "vkDestroyImageView")); fp_vkCreateShaderModule = reinterpret_cast(procAddr(device, "vkCreateShaderModule")); - fp_vkDestroyShaderModule = reinterpret_cast(procAddr(device, "vkDestroyShaderModule")); - fp_vkCreatePipelineCache = reinterpret_cast(procAddr(device, "vkCreatePipelineCache")); - fp_vkDestroyPipelineCache = reinterpret_cast(procAddr(device, "vkDestroyPipelineCache")); - fp_vkGetPipelineCacheData = reinterpret_cast(procAddr(device, "vkGetPipelineCacheData")); - fp_vkMergePipelineCaches = reinterpret_cast(procAddr(device, "vkMergePipelineCaches")); + fp_vkDestroyShaderModule = + reinterpret_cast(procAddr(device, "vkDestroyShaderModule")); + fp_vkCreatePipelineCache = + reinterpret_cast(procAddr(device, "vkCreatePipelineCache")); + fp_vkDestroyPipelineCache = + reinterpret_cast(procAddr(device, "vkDestroyPipelineCache")); + fp_vkGetPipelineCacheData = + reinterpret_cast(procAddr(device, "vkGetPipelineCacheData")); + fp_vkMergePipelineCaches = + reinterpret_cast(procAddr(device, "vkMergePipelineCaches")); #if (defined(VK_KHR_pipeline_binary)) - fp_vkCreatePipelineBinariesKHR = reinterpret_cast(procAddr(device, "vkCreatePipelineBinariesKHR")); + fp_vkCreatePipelineBinariesKHR = + reinterpret_cast(procAddr(device, "vkCreatePipelineBinariesKHR")); #endif #if (defined(VK_KHR_pipeline_binary)) - fp_vkDestroyPipelineBinaryKHR = reinterpret_cast(procAddr(device, "vkDestroyPipelineBinaryKHR")); + fp_vkDestroyPipelineBinaryKHR = + reinterpret_cast(procAddr(device, "vkDestroyPipelineBinaryKHR")); #endif #if (defined(VK_KHR_pipeline_binary)) fp_vkGetPipelineKeyKHR = reinterpret_cast(procAddr(device, "vkGetPipelineKeyKHR")); #endif #if (defined(VK_KHR_pipeline_binary)) - fp_vkGetPipelineBinaryDataKHR = reinterpret_cast(procAddr(device, "vkGetPipelineBinaryDataKHR")); + fp_vkGetPipelineBinaryDataKHR = + 
reinterpret_cast(procAddr(device, "vkGetPipelineBinaryDataKHR")); #endif #if (defined(VK_KHR_pipeline_binary)) - fp_vkReleaseCapturedPipelineDataKHR = reinterpret_cast(procAddr(device, "vkReleaseCapturedPipelineDataKHR")); + fp_vkReleaseCapturedPipelineDataKHR = reinterpret_cast( + procAddr(device, "vkReleaseCapturedPipelineDataKHR")); #endif - fp_vkCreateGraphicsPipelines = reinterpret_cast(procAddr(device, "vkCreateGraphicsPipelines")); - fp_vkCreateComputePipelines = reinterpret_cast(procAddr(device, "vkCreateComputePipelines")); + fp_vkCreateGraphicsPipelines = + reinterpret_cast(procAddr(device, "vkCreateGraphicsPipelines")); + fp_vkCreateComputePipelines = + reinterpret_cast(procAddr(device, "vkCreateComputePipelines")); #if (defined(VK_HUAWEI_subpass_shading)) - fp_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = reinterpret_cast(procAddr(device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI")); + fp_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = + reinterpret_cast( + procAddr(device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI")); #endif fp_vkDestroyPipeline = reinterpret_cast(procAddr(device, "vkDestroyPipeline")); - fp_vkCreatePipelineLayout = reinterpret_cast(procAddr(device, "vkCreatePipelineLayout")); - fp_vkDestroyPipelineLayout = reinterpret_cast(procAddr(device, "vkDestroyPipelineLayout")); - fp_vkCreateSampler = reinterpret_cast(procAddr(device, "vkCreateSampler")); + fp_vkCreatePipelineLayout = + reinterpret_cast(procAddr(device, "vkCreatePipelineLayout")); + fp_vkDestroyPipelineLayout = + reinterpret_cast(procAddr(device, "vkDestroyPipelineLayout")); + fp_vkCreateSampler = reinterpret_cast(procAddr(device, "vkCreateSampler")); fp_vkDestroySampler = reinterpret_cast(procAddr(device, "vkDestroySampler")); - fp_vkCreateDescriptorSetLayout = reinterpret_cast(procAddr(device, "vkCreateDescriptorSetLayout")); - fp_vkDestroyDescriptorSetLayout = reinterpret_cast(procAddr(device, "vkDestroyDescriptorSetLayout")); - fp_vkCreateDescriptorPool = reinterpret_cast(procAddr(device, "vkCreateDescriptorPool")); - fp_vkDestroyDescriptorPool = reinterpret_cast(procAddr(device, "vkDestroyDescriptorPool")); - fp_vkResetDescriptorPool = reinterpret_cast(procAddr(device, "vkResetDescriptorPool")); - fp_vkAllocateDescriptorSets = reinterpret_cast(procAddr(device, "vkAllocateDescriptorSets")); + fp_vkCreateDescriptorSetLayout = + reinterpret_cast(procAddr(device, "vkCreateDescriptorSetLayout")); + fp_vkDestroyDescriptorSetLayout = + reinterpret_cast(procAddr(device, "vkDestroyDescriptorSetLayout")); + fp_vkCreateDescriptorPool = + reinterpret_cast(procAddr(device, "vkCreateDescriptorPool")); + fp_vkDestroyDescriptorPool = + reinterpret_cast(procAddr(device, "vkDestroyDescriptorPool")); + fp_vkResetDescriptorPool = + reinterpret_cast(procAddr(device, "vkResetDescriptorPool")); + fp_vkAllocateDescriptorSets = + reinterpret_cast(procAddr(device, "vkAllocateDescriptorSets")); fp_vkFreeDescriptorSets = reinterpret_cast(procAddr(device, "vkFreeDescriptorSets")); - fp_vkUpdateDescriptorSets = reinterpret_cast(procAddr(device, "vkUpdateDescriptorSets")); - fp_vkCreateFramebuffer = reinterpret_cast(procAddr(device, "vkCreateFramebuffer")); + fp_vkUpdateDescriptorSets = + reinterpret_cast(procAddr(device, "vkUpdateDescriptorSets")); + fp_vkCreateFramebuffer = reinterpret_cast(procAddr(device, "vkCreateFramebuffer")); fp_vkDestroyFramebuffer = reinterpret_cast(procAddr(device, "vkDestroyFramebuffer")); - fp_vkCreateRenderPass = reinterpret_cast(procAddr(device, "vkCreateRenderPass")); - 
fp_vkDestroyRenderPass = reinterpret_cast(procAddr(device, "vkDestroyRenderPass")); - fp_vkGetRenderAreaGranularity = reinterpret_cast(procAddr(device, "vkGetRenderAreaGranularity")); + fp_vkCreateRenderPass = reinterpret_cast(procAddr(device, "vkCreateRenderPass")); + fp_vkDestroyRenderPass = reinterpret_cast(procAddr(device, "vkDestroyRenderPass")); + fp_vkGetRenderAreaGranularity = + reinterpret_cast(procAddr(device, "vkGetRenderAreaGranularity")); #if (defined(VK_KHR_maintenance5)) - fp_vkGetRenderingAreaGranularityKHR = reinterpret_cast(procAddr(device, "vkGetRenderingAreaGranularityKHR")); + fp_vkGetRenderingAreaGranularityKHR = reinterpret_cast( + procAddr(device, "vkGetRenderingAreaGranularityKHR")); #endif - fp_vkCreateCommandPool = reinterpret_cast(procAddr(device, "vkCreateCommandPool")); + fp_vkCreateCommandPool = reinterpret_cast(procAddr(device, "vkCreateCommandPool")); fp_vkDestroyCommandPool = reinterpret_cast(procAddr(device, "vkDestroyCommandPool")); - fp_vkResetCommandPool = reinterpret_cast(procAddr(device, "vkResetCommandPool")); - fp_vkAllocateCommandBuffers = reinterpret_cast(procAddr(device, "vkAllocateCommandBuffers")); + fp_vkResetCommandPool = reinterpret_cast(procAddr(device, "vkResetCommandPool")); + fp_vkAllocateCommandBuffers = + reinterpret_cast(procAddr(device, "vkAllocateCommandBuffers")); fp_vkFreeCommandBuffers = reinterpret_cast(procAddr(device, "vkFreeCommandBuffers")); fp_vkBeginCommandBuffer = reinterpret_cast(procAddr(device, "vkBeginCommandBuffer")); - fp_vkEndCommandBuffer = reinterpret_cast(procAddr(device, "vkEndCommandBuffer")); + fp_vkEndCommandBuffer = reinterpret_cast(procAddr(device, "vkEndCommandBuffer")); fp_vkResetCommandBuffer = reinterpret_cast(procAddr(device, "vkResetCommandBuffer")); - fp_vkCmdBindPipeline = reinterpret_cast(procAddr(device, "vkCmdBindPipeline")); + fp_vkCmdBindPipeline = reinterpret_cast(procAddr(device, "vkCmdBindPipeline")); #if (defined(VK_EXT_attachment_feedback_loop_dynamic_state)) - fp_vkCmdSetAttachmentFeedbackLoopEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetAttachmentFeedbackLoopEnableEXT")); + fp_vkCmdSetAttachmentFeedbackLoopEnableEXT = reinterpret_cast( + procAddr(device, "vkCmdSetAttachmentFeedbackLoopEnableEXT")); #endif - fp_vkCmdSetViewport = reinterpret_cast(procAddr(device, "vkCmdSetViewport")); - fp_vkCmdSetScissor = reinterpret_cast(procAddr(device, "vkCmdSetScissor")); + fp_vkCmdSetViewport = reinterpret_cast(procAddr(device, "vkCmdSetViewport")); + fp_vkCmdSetScissor = reinterpret_cast(procAddr(device, "vkCmdSetScissor")); fp_vkCmdSetLineWidth = reinterpret_cast(procAddr(device, "vkCmdSetLineWidth")); fp_vkCmdSetDepthBias = reinterpret_cast(procAddr(device, "vkCmdSetDepthBias")); - fp_vkCmdSetBlendConstants = reinterpret_cast(procAddr(device, "vkCmdSetBlendConstants")); + fp_vkCmdSetBlendConstants = + reinterpret_cast(procAddr(device, "vkCmdSetBlendConstants")); fp_vkCmdSetDepthBounds = reinterpret_cast(procAddr(device, "vkCmdSetDepthBounds")); - fp_vkCmdSetStencilCompareMask = reinterpret_cast(procAddr(device, "vkCmdSetStencilCompareMask")); - fp_vkCmdSetStencilWriteMask = reinterpret_cast(procAddr(device, "vkCmdSetStencilWriteMask")); - fp_vkCmdSetStencilReference = reinterpret_cast(procAddr(device, "vkCmdSetStencilReference")); - fp_vkCmdBindDescriptorSets = reinterpret_cast(procAddr(device, "vkCmdBindDescriptorSets")); + fp_vkCmdSetStencilCompareMask = + reinterpret_cast(procAddr(device, "vkCmdSetStencilCompareMask")); + fp_vkCmdSetStencilWriteMask = + 
reinterpret_cast(procAddr(device, "vkCmdSetStencilWriteMask")); + fp_vkCmdSetStencilReference = + reinterpret_cast(procAddr(device, "vkCmdSetStencilReference")); + fp_vkCmdBindDescriptorSets = + reinterpret_cast(procAddr(device, "vkCmdBindDescriptorSets")); fp_vkCmdBindIndexBuffer = reinterpret_cast(procAddr(device, "vkCmdBindIndexBuffer")); - fp_vkCmdBindVertexBuffers = reinterpret_cast(procAddr(device, "vkCmdBindVertexBuffers")); - fp_vkCmdDraw = reinterpret_cast(procAddr(device, "vkCmdDraw")); + fp_vkCmdBindVertexBuffers = + reinterpret_cast(procAddr(device, "vkCmdBindVertexBuffers")); + fp_vkCmdDraw = reinterpret_cast(procAddr(device, "vkCmdDraw")); fp_vkCmdDrawIndexed = reinterpret_cast(procAddr(device, "vkCmdDrawIndexed")); #if (defined(VK_EXT_multi_draw)) fp_vkCmdDrawMultiEXT = reinterpret_cast(procAddr(device, "vkCmdDrawMultiEXT")); #endif #if (defined(VK_EXT_multi_draw)) - fp_vkCmdDrawMultiIndexedEXT = reinterpret_cast(procAddr(device, "vkCmdDrawMultiIndexedEXT")); + fp_vkCmdDrawMultiIndexedEXT = + reinterpret_cast(procAddr(device, "vkCmdDrawMultiIndexedEXT")); #endif fp_vkCmdDrawIndirect = reinterpret_cast(procAddr(device, "vkCmdDrawIndirect")); - fp_vkCmdDrawIndexedIndirect = reinterpret_cast(procAddr(device, "vkCmdDrawIndexedIndirect")); + fp_vkCmdDrawIndexedIndirect = + reinterpret_cast(procAddr(device, "vkCmdDrawIndexedIndirect")); fp_vkCmdDispatch = reinterpret_cast(procAddr(device, "vkCmdDispatch")); - fp_vkCmdDispatchIndirect = reinterpret_cast(procAddr(device, "vkCmdDispatchIndirect")); + fp_vkCmdDispatchIndirect = + reinterpret_cast(procAddr(device, "vkCmdDispatchIndirect")); #if (defined(VK_HUAWEI_subpass_shading)) - fp_vkCmdSubpassShadingHUAWEI = reinterpret_cast(procAddr(device, "vkCmdSubpassShadingHUAWEI")); + fp_vkCmdSubpassShadingHUAWEI = + reinterpret_cast(procAddr(device, "vkCmdSubpassShadingHUAWEI")); #endif #if (defined(VK_HUAWEI_cluster_culling_shader)) - fp_vkCmdDrawClusterHUAWEI = reinterpret_cast(procAddr(device, "vkCmdDrawClusterHUAWEI")); + fp_vkCmdDrawClusterHUAWEI = + reinterpret_cast(procAddr(device, "vkCmdDrawClusterHUAWEI")); #endif #if (defined(VK_HUAWEI_cluster_culling_shader)) - fp_vkCmdDrawClusterIndirectHUAWEI = reinterpret_cast(procAddr(device, "vkCmdDrawClusterIndirectHUAWEI")); + fp_vkCmdDrawClusterIndirectHUAWEI = + reinterpret_cast(procAddr(device, "vkCmdDrawClusterIndirectHUAWEI")); #endif #if (defined(VK_NV_device_generated_commands_compute)) - fp_vkCmdUpdatePipelineIndirectBufferNV = reinterpret_cast(procAddr(device, "vkCmdUpdatePipelineIndirectBufferNV")); + fp_vkCmdUpdatePipelineIndirectBufferNV = reinterpret_cast( + procAddr(device, "vkCmdUpdatePipelineIndirectBufferNV")); #endif fp_vkCmdCopyBuffer = reinterpret_cast(procAddr(device, "vkCmdCopyBuffer")); - fp_vkCmdCopyImage = reinterpret_cast(procAddr(device, "vkCmdCopyImage")); - fp_vkCmdBlitImage = reinterpret_cast(procAddr(device, "vkCmdBlitImage")); - fp_vkCmdCopyBufferToImage = reinterpret_cast(procAddr(device, "vkCmdCopyBufferToImage")); - fp_vkCmdCopyImageToBuffer = reinterpret_cast(procAddr(device, "vkCmdCopyImageToBuffer")); + fp_vkCmdCopyImage = reinterpret_cast(procAddr(device, "vkCmdCopyImage")); + fp_vkCmdBlitImage = reinterpret_cast(procAddr(device, "vkCmdBlitImage")); + fp_vkCmdCopyBufferToImage = + reinterpret_cast(procAddr(device, "vkCmdCopyBufferToImage")); + fp_vkCmdCopyImageToBuffer = + reinterpret_cast(procAddr(device, "vkCmdCopyImageToBuffer")); #if (defined(VK_NV_copy_memory_indirect)) - fp_vkCmdCopyMemoryIndirectNV = reinterpret_cast(procAddr(device, 
"vkCmdCopyMemoryIndirectNV")); + fp_vkCmdCopyMemoryIndirectNV = + reinterpret_cast(procAddr(device, "vkCmdCopyMemoryIndirectNV")); #endif #if (defined(VK_NV_copy_memory_indirect)) - fp_vkCmdCopyMemoryToImageIndirectNV = reinterpret_cast(procAddr(device, "vkCmdCopyMemoryToImageIndirectNV")); + fp_vkCmdCopyMemoryToImageIndirectNV = reinterpret_cast( + procAddr(device, "vkCmdCopyMemoryToImageIndirectNV")); #endif - fp_vkCmdUpdateBuffer = reinterpret_cast(procAddr(device, "vkCmdUpdateBuffer")); - fp_vkCmdFillBuffer = reinterpret_cast(procAddr(device, "vkCmdFillBuffer")); + fp_vkCmdUpdateBuffer = reinterpret_cast(procAddr(device, "vkCmdUpdateBuffer")); + fp_vkCmdFillBuffer = reinterpret_cast(procAddr(device, "vkCmdFillBuffer")); fp_vkCmdClearColorImage = reinterpret_cast(procAddr(device, "vkCmdClearColorImage")); - fp_vkCmdClearDepthStencilImage = reinterpret_cast(procAddr(device, "vkCmdClearDepthStencilImage")); - fp_vkCmdClearAttachments = reinterpret_cast(procAddr(device, "vkCmdClearAttachments")); - fp_vkCmdResolveImage = reinterpret_cast(procAddr(device, "vkCmdResolveImage")); - fp_vkCmdSetEvent = reinterpret_cast(procAddr(device, "vkCmdSetEvent")); - fp_vkCmdResetEvent = reinterpret_cast(procAddr(device, "vkCmdResetEvent")); - fp_vkCmdWaitEvents = reinterpret_cast(procAddr(device, "vkCmdWaitEvents")); + fp_vkCmdClearDepthStencilImage = + reinterpret_cast(procAddr(device, "vkCmdClearDepthStencilImage")); + fp_vkCmdClearAttachments = + reinterpret_cast(procAddr(device, "vkCmdClearAttachments")); + fp_vkCmdResolveImage = reinterpret_cast(procAddr(device, "vkCmdResolveImage")); + fp_vkCmdSetEvent = reinterpret_cast(procAddr(device, "vkCmdSetEvent")); + fp_vkCmdResetEvent = reinterpret_cast(procAddr(device, "vkCmdResetEvent")); + fp_vkCmdWaitEvents = reinterpret_cast(procAddr(device, "vkCmdWaitEvents")); fp_vkCmdPipelineBarrier = reinterpret_cast(procAddr(device, "vkCmdPipelineBarrier")); - fp_vkCmdBeginQuery = reinterpret_cast(procAddr(device, "vkCmdBeginQuery")); - fp_vkCmdEndQuery = reinterpret_cast(procAddr(device, "vkCmdEndQuery")); + fp_vkCmdBeginQuery = reinterpret_cast(procAddr(device, "vkCmdBeginQuery")); + fp_vkCmdEndQuery = reinterpret_cast(procAddr(device, "vkCmdEndQuery")); #if (defined(VK_EXT_conditional_rendering)) - fp_vkCmdBeginConditionalRenderingEXT = reinterpret_cast(procAddr(device, "vkCmdBeginConditionalRenderingEXT")); + fp_vkCmdBeginConditionalRenderingEXT = reinterpret_cast( + procAddr(device, "vkCmdBeginConditionalRenderingEXT")); #endif #if (defined(VK_EXT_conditional_rendering)) - fp_vkCmdEndConditionalRenderingEXT = reinterpret_cast(procAddr(device, "vkCmdEndConditionalRenderingEXT")); + fp_vkCmdEndConditionalRenderingEXT = + reinterpret_cast(procAddr(device, "vkCmdEndConditionalRenderingEXT")); #endif fp_vkCmdResetQueryPool = reinterpret_cast(procAddr(device, "vkCmdResetQueryPool")); fp_vkCmdWriteTimestamp = reinterpret_cast(procAddr(device, "vkCmdWriteTimestamp")); - fp_vkCmdCopyQueryPoolResults = reinterpret_cast(procAddr(device, "vkCmdCopyQueryPoolResults")); - fp_vkCmdPushConstants = reinterpret_cast(procAddr(device, "vkCmdPushConstants")); + fp_vkCmdCopyQueryPoolResults = + reinterpret_cast(procAddr(device, "vkCmdCopyQueryPoolResults")); + fp_vkCmdPushConstants = reinterpret_cast(procAddr(device, "vkCmdPushConstants")); fp_vkCmdBeginRenderPass = reinterpret_cast(procAddr(device, "vkCmdBeginRenderPass")); - fp_vkCmdNextSubpass = reinterpret_cast(procAddr(device, "vkCmdNextSubpass")); - fp_vkCmdEndRenderPass = reinterpret_cast(procAddr(device, 
"vkCmdEndRenderPass")); + fp_vkCmdNextSubpass = reinterpret_cast(procAddr(device, "vkCmdNextSubpass")); + fp_vkCmdEndRenderPass = reinterpret_cast(procAddr(device, "vkCmdEndRenderPass")); fp_vkCmdExecuteCommands = reinterpret_cast(procAddr(device, "vkCmdExecuteCommands")); #if (defined(VK_KHR_display_swapchain)) - fp_vkCreateSharedSwapchainsKHR = reinterpret_cast(procAddr(device, "vkCreateSharedSwapchainsKHR")); + fp_vkCreateSharedSwapchainsKHR = + reinterpret_cast(procAddr(device, "vkCreateSharedSwapchainsKHR")); #endif #if (defined(VK_KHR_swapchain)) fp_vkCreateSwapchainKHR = reinterpret_cast(procAddr(device, "vkCreateSwapchainKHR")); #endif #if (defined(VK_KHR_swapchain)) - fp_vkDestroySwapchainKHR = reinterpret_cast(procAddr(device, "vkDestroySwapchainKHR")); + fp_vkDestroySwapchainKHR = + reinterpret_cast(procAddr(device, "vkDestroySwapchainKHR")); #endif #if (defined(VK_KHR_swapchain)) - fp_vkGetSwapchainImagesKHR = reinterpret_cast(procAddr(device, "vkGetSwapchainImagesKHR")); + fp_vkGetSwapchainImagesKHR = + reinterpret_cast(procAddr(device, "vkGetSwapchainImagesKHR")); #endif #if (defined(VK_KHR_swapchain)) - fp_vkAcquireNextImageKHR = reinterpret_cast(procAddr(device, "vkAcquireNextImageKHR")); + fp_vkAcquireNextImageKHR = + reinterpret_cast(procAddr(device, "vkAcquireNextImageKHR")); #endif #if (defined(VK_KHR_swapchain)) fp_vkQueuePresentKHR = reinterpret_cast(procAddr(device, "vkQueuePresentKHR")); #endif #if (defined(VK_EXT_debug_marker)) - fp_vkDebugMarkerSetObjectNameEXT = reinterpret_cast(procAddr(device, "vkDebugMarkerSetObjectNameEXT")); + fp_vkDebugMarkerSetObjectNameEXT = + reinterpret_cast(procAddr(device, "vkDebugMarkerSetObjectNameEXT")); #endif #if (defined(VK_EXT_debug_marker)) - fp_vkDebugMarkerSetObjectTagEXT = reinterpret_cast(procAddr(device, "vkDebugMarkerSetObjectTagEXT")); + fp_vkDebugMarkerSetObjectTagEXT = + reinterpret_cast(procAddr(device, "vkDebugMarkerSetObjectTagEXT")); #endif #if (defined(VK_EXT_debug_marker)) - fp_vkCmdDebugMarkerBeginEXT = reinterpret_cast(procAddr(device, "vkCmdDebugMarkerBeginEXT")); + fp_vkCmdDebugMarkerBeginEXT = + reinterpret_cast(procAddr(device, "vkCmdDebugMarkerBeginEXT")); #endif #if (defined(VK_EXT_debug_marker)) - fp_vkCmdDebugMarkerEndEXT = reinterpret_cast(procAddr(device, "vkCmdDebugMarkerEndEXT")); + fp_vkCmdDebugMarkerEndEXT = + reinterpret_cast(procAddr(device, "vkCmdDebugMarkerEndEXT")); #endif #if (defined(VK_EXT_debug_marker)) - fp_vkCmdDebugMarkerInsertEXT = reinterpret_cast(procAddr(device, "vkCmdDebugMarkerInsertEXT")); + fp_vkCmdDebugMarkerInsertEXT = + reinterpret_cast(procAddr(device, "vkCmdDebugMarkerInsertEXT")); #endif #if (defined(VK_NV_external_memory_win32)) - fp_vkGetMemoryWin32HandleNV = reinterpret_cast(procAddr(device, "vkGetMemoryWin32HandleNV")); + fp_vkGetMemoryWin32HandleNV = + reinterpret_cast(procAddr(device, "vkGetMemoryWin32HandleNV")); #endif #if (defined(VK_NV_device_generated_commands)) - fp_vkCmdExecuteGeneratedCommandsNV = reinterpret_cast(procAddr(device, "vkCmdExecuteGeneratedCommandsNV")); + fp_vkCmdExecuteGeneratedCommandsNV = + reinterpret_cast(procAddr(device, "vkCmdExecuteGeneratedCommandsNV")); #endif #if (defined(VK_NV_device_generated_commands)) - fp_vkCmdPreprocessGeneratedCommandsNV = reinterpret_cast(procAddr(device, "vkCmdPreprocessGeneratedCommandsNV")); + fp_vkCmdPreprocessGeneratedCommandsNV = reinterpret_cast( + procAddr(device, "vkCmdPreprocessGeneratedCommandsNV")); #endif #if (defined(VK_NV_device_generated_commands)) - fp_vkCmdBindPipelineShaderGroupNV = 
reinterpret_cast(procAddr(device, "vkCmdBindPipelineShaderGroupNV")); + fp_vkCmdBindPipelineShaderGroupNV = + reinterpret_cast(procAddr(device, "vkCmdBindPipelineShaderGroupNV")); #endif #if (defined(VK_NV_device_generated_commands)) - fp_vkGetGeneratedCommandsMemoryRequirementsNV = reinterpret_cast(procAddr(device, "vkGetGeneratedCommandsMemoryRequirementsNV")); + fp_vkGetGeneratedCommandsMemoryRequirementsNV = + reinterpret_cast( + procAddr(device, "vkGetGeneratedCommandsMemoryRequirementsNV")); #endif #if (defined(VK_NV_device_generated_commands)) - fp_vkCreateIndirectCommandsLayoutNV = reinterpret_cast(procAddr(device, "vkCreateIndirectCommandsLayoutNV")); + fp_vkCreateIndirectCommandsLayoutNV = reinterpret_cast( + procAddr(device, "vkCreateIndirectCommandsLayoutNV")); #endif #if (defined(VK_NV_device_generated_commands)) - fp_vkDestroyIndirectCommandsLayoutNV = reinterpret_cast(procAddr(device, "vkDestroyIndirectCommandsLayoutNV")); + fp_vkDestroyIndirectCommandsLayoutNV = reinterpret_cast( + procAddr(device, "vkDestroyIndirectCommandsLayoutNV")); #endif #if (defined(VK_EXT_device_generated_commands)) - fp_vkCmdExecuteGeneratedCommandsEXT = reinterpret_cast(procAddr(device, "vkCmdExecuteGeneratedCommandsEXT")); + fp_vkCmdExecuteGeneratedCommandsEXT = reinterpret_cast( + procAddr(device, "vkCmdExecuteGeneratedCommandsEXT")); #endif #if (defined(VK_EXT_device_generated_commands)) - fp_vkCmdPreprocessGeneratedCommandsEXT = reinterpret_cast(procAddr(device, "vkCmdPreprocessGeneratedCommandsEXT")); + fp_vkCmdPreprocessGeneratedCommandsEXT = reinterpret_cast( + procAddr(device, "vkCmdPreprocessGeneratedCommandsEXT")); #endif #if (defined(VK_EXT_device_generated_commands)) - fp_vkGetGeneratedCommandsMemoryRequirementsEXT = reinterpret_cast(procAddr(device, "vkGetGeneratedCommandsMemoryRequirementsEXT")); + fp_vkGetGeneratedCommandsMemoryRequirementsEXT = + reinterpret_cast( + procAddr(device, "vkGetGeneratedCommandsMemoryRequirementsEXT")); #endif #if (defined(VK_EXT_device_generated_commands)) - fp_vkCreateIndirectCommandsLayoutEXT = reinterpret_cast(procAddr(device, "vkCreateIndirectCommandsLayoutEXT")); + fp_vkCreateIndirectCommandsLayoutEXT = reinterpret_cast( + procAddr(device, "vkCreateIndirectCommandsLayoutEXT")); #endif #if (defined(VK_EXT_device_generated_commands)) - fp_vkDestroyIndirectCommandsLayoutEXT = reinterpret_cast(procAddr(device, "vkDestroyIndirectCommandsLayoutEXT")); + fp_vkDestroyIndirectCommandsLayoutEXT = reinterpret_cast( + procAddr(device, "vkDestroyIndirectCommandsLayoutEXT")); #endif #if (defined(VK_EXT_device_generated_commands)) - fp_vkCreateIndirectExecutionSetEXT = reinterpret_cast(procAddr(device, "vkCreateIndirectExecutionSetEXT")); + fp_vkCreateIndirectExecutionSetEXT = + reinterpret_cast(procAddr(device, "vkCreateIndirectExecutionSetEXT")); #endif #if (defined(VK_EXT_device_generated_commands)) - fp_vkDestroyIndirectExecutionSetEXT = reinterpret_cast(procAddr(device, "vkDestroyIndirectExecutionSetEXT")); + fp_vkDestroyIndirectExecutionSetEXT = reinterpret_cast( + procAddr(device, "vkDestroyIndirectExecutionSetEXT")); #endif #if (defined(VK_EXT_device_generated_commands)) - fp_vkUpdateIndirectExecutionSetPipelineEXT = reinterpret_cast(procAddr(device, "vkUpdateIndirectExecutionSetPipelineEXT")); + fp_vkUpdateIndirectExecutionSetPipelineEXT = reinterpret_cast( + procAddr(device, "vkUpdateIndirectExecutionSetPipelineEXT")); #endif #if (defined(VK_EXT_device_generated_commands)) - fp_vkUpdateIndirectExecutionSetShaderEXT = reinterpret_cast(procAddr(device, 
"vkUpdateIndirectExecutionSetShaderEXT")); + fp_vkUpdateIndirectExecutionSetShaderEXT = reinterpret_cast( + procAddr(device, "vkUpdateIndirectExecutionSetShaderEXT")); #endif #if (defined(VK_KHR_push_descriptor)) - fp_vkCmdPushDescriptorSetKHR = reinterpret_cast(procAddr(device, "vkCmdPushDescriptorSetKHR")); + fp_vkCmdPushDescriptorSetKHR = + reinterpret_cast(procAddr(device, "vkCmdPushDescriptorSetKHR")); #endif #if (defined(VK_VERSION_1_1)) fp_vkTrimCommandPool = reinterpret_cast(procAddr(device, "vkTrimCommandPool")); #endif #if (defined(VK_KHR_external_memory_win32)) - fp_vkGetMemoryWin32HandleKHR = reinterpret_cast(procAddr(device, "vkGetMemoryWin32HandleKHR")); + fp_vkGetMemoryWin32HandleKHR = + reinterpret_cast(procAddr(device, "vkGetMemoryWin32HandleKHR")); #endif #if (defined(VK_KHR_external_memory_win32)) - fp_vkGetMemoryWin32HandlePropertiesKHR = reinterpret_cast(procAddr(device, "vkGetMemoryWin32HandlePropertiesKHR")); + fp_vkGetMemoryWin32HandlePropertiesKHR = reinterpret_cast( + procAddr(device, "vkGetMemoryWin32HandlePropertiesKHR")); #endif #if (defined(VK_KHR_external_memory_fd)) fp_vkGetMemoryFdKHR = reinterpret_cast(procAddr(device, "vkGetMemoryFdKHR")); #endif #if (defined(VK_KHR_external_memory_fd)) - fp_vkGetMemoryFdPropertiesKHR = reinterpret_cast(procAddr(device, "vkGetMemoryFdPropertiesKHR")); + fp_vkGetMemoryFdPropertiesKHR = + reinterpret_cast(procAddr(device, "vkGetMemoryFdPropertiesKHR")); #endif #if (defined(VK_FUCHSIA_external_memory)) - fp_vkGetMemoryZirconHandleFUCHSIA = reinterpret_cast(procAddr(device, "vkGetMemoryZirconHandleFUCHSIA")); + fp_vkGetMemoryZirconHandleFUCHSIA = + reinterpret_cast(procAddr(device, "vkGetMemoryZirconHandleFUCHSIA")); #endif #if (defined(VK_FUCHSIA_external_memory)) - fp_vkGetMemoryZirconHandlePropertiesFUCHSIA = reinterpret_cast(procAddr(device, "vkGetMemoryZirconHandlePropertiesFUCHSIA")); + fp_vkGetMemoryZirconHandlePropertiesFUCHSIA = reinterpret_cast( + procAddr(device, "vkGetMemoryZirconHandlePropertiesFUCHSIA")); #endif #if (defined(VK_NV_external_memory_rdma)) - fp_vkGetMemoryRemoteAddressNV = reinterpret_cast(procAddr(device, "vkGetMemoryRemoteAddressNV")); + fp_vkGetMemoryRemoteAddressNV = + reinterpret_cast(procAddr(device, "vkGetMemoryRemoteAddressNV")); #endif #if (defined(VK_NV_external_memory_sci_buf)) fp_vkGetMemorySciBufNV = reinterpret_cast(procAddr(device, "vkGetMemorySciBufNV")); #endif #if (defined(VK_KHR_external_semaphore_win32)) - fp_vkGetSemaphoreWin32HandleKHR = reinterpret_cast(procAddr(device, "vkGetSemaphoreWin32HandleKHR")); + fp_vkGetSemaphoreWin32HandleKHR = + reinterpret_cast(procAddr(device, "vkGetSemaphoreWin32HandleKHR")); #endif #if (defined(VK_KHR_external_semaphore_win32)) - fp_vkImportSemaphoreWin32HandleKHR = reinterpret_cast(procAddr(device, "vkImportSemaphoreWin32HandleKHR")); + fp_vkImportSemaphoreWin32HandleKHR = + reinterpret_cast(procAddr(device, "vkImportSemaphoreWin32HandleKHR")); #endif #if (defined(VK_KHR_external_semaphore_fd)) fp_vkGetSemaphoreFdKHR = reinterpret_cast(procAddr(device, "vkGetSemaphoreFdKHR")); #endif #if (defined(VK_KHR_external_semaphore_fd)) - fp_vkImportSemaphoreFdKHR = reinterpret_cast(procAddr(device, "vkImportSemaphoreFdKHR")); + fp_vkImportSemaphoreFdKHR = + reinterpret_cast(procAddr(device, "vkImportSemaphoreFdKHR")); #endif #if (defined(VK_FUCHSIA_external_semaphore)) - fp_vkGetSemaphoreZirconHandleFUCHSIA = reinterpret_cast(procAddr(device, "vkGetSemaphoreZirconHandleFUCHSIA")); + fp_vkGetSemaphoreZirconHandleFUCHSIA = reinterpret_cast( + 
procAddr(device, "vkGetSemaphoreZirconHandleFUCHSIA")); #endif #if (defined(VK_FUCHSIA_external_semaphore)) - fp_vkImportSemaphoreZirconHandleFUCHSIA = reinterpret_cast(procAddr(device, "vkImportSemaphoreZirconHandleFUCHSIA")); + fp_vkImportSemaphoreZirconHandleFUCHSIA = reinterpret_cast( + procAddr(device, "vkImportSemaphoreZirconHandleFUCHSIA")); #endif #if (defined(VK_KHR_external_fence_win32)) - fp_vkGetFenceWin32HandleKHR = reinterpret_cast(procAddr(device, "vkGetFenceWin32HandleKHR")); + fp_vkGetFenceWin32HandleKHR = + reinterpret_cast(procAddr(device, "vkGetFenceWin32HandleKHR")); #endif #if (defined(VK_KHR_external_fence_win32)) - fp_vkImportFenceWin32HandleKHR = reinterpret_cast(procAddr(device, "vkImportFenceWin32HandleKHR")); + fp_vkImportFenceWin32HandleKHR = + reinterpret_cast(procAddr(device, "vkImportFenceWin32HandleKHR")); #endif #if (defined(VK_KHR_external_fence_fd)) fp_vkGetFenceFdKHR = reinterpret_cast(procAddr(device, "vkGetFenceFdKHR")); @@ -1665,43 +2290,56 @@ struct DispatchTable { fp_vkImportFenceFdKHR = reinterpret_cast(procAddr(device, "vkImportFenceFdKHR")); #endif #if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) - fp_vkGetFenceSciSyncFenceNV = reinterpret_cast(procAddr(device, "vkGetFenceSciSyncFenceNV")); + fp_vkGetFenceSciSyncFenceNV = + reinterpret_cast(procAddr(device, "vkGetFenceSciSyncFenceNV")); #endif #if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) - fp_vkGetFenceSciSyncObjNV = reinterpret_cast(procAddr(device, "vkGetFenceSciSyncObjNV")); + fp_vkGetFenceSciSyncObjNV = + reinterpret_cast(procAddr(device, "vkGetFenceSciSyncObjNV")); #endif #if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) - fp_vkImportFenceSciSyncFenceNV = reinterpret_cast(procAddr(device, "vkImportFenceSciSyncFenceNV")); + fp_vkImportFenceSciSyncFenceNV = + reinterpret_cast(procAddr(device, "vkImportFenceSciSyncFenceNV")); #endif #if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) - fp_vkImportFenceSciSyncObjNV = reinterpret_cast(procAddr(device, "vkImportFenceSciSyncObjNV")); + fp_vkImportFenceSciSyncObjNV = + reinterpret_cast(procAddr(device, "vkImportFenceSciSyncObjNV")); #endif #if (defined(VK_NV_external_sci_sync)) - fp_vkGetSemaphoreSciSyncObjNV = reinterpret_cast(procAddr(device, "vkGetSemaphoreSciSyncObjNV")); + fp_vkGetSemaphoreSciSyncObjNV = + reinterpret_cast(procAddr(device, "vkGetSemaphoreSciSyncObjNV")); #endif #if (defined(VK_NV_external_sci_sync)) - fp_vkImportSemaphoreSciSyncObjNV = reinterpret_cast(procAddr(device, "vkImportSemaphoreSciSyncObjNV")); + fp_vkImportSemaphoreSciSyncObjNV = + reinterpret_cast(procAddr(device, "vkImportSemaphoreSciSyncObjNV")); #endif #if (defined(VK_NV_external_sci_sync2)) - fp_vkCreateSemaphoreSciSyncPoolNV = reinterpret_cast(procAddr(device, "vkCreateSemaphoreSciSyncPoolNV")); + fp_vkCreateSemaphoreSciSyncPoolNV = + reinterpret_cast(procAddr(device, "vkCreateSemaphoreSciSyncPoolNV")); #endif #if (defined(VK_NV_external_sci_sync2)) - fp_vkDestroySemaphoreSciSyncPoolNV = reinterpret_cast(procAddr(device, "vkDestroySemaphoreSciSyncPoolNV")); + fp_vkDestroySemaphoreSciSyncPoolNV = + reinterpret_cast(procAddr(device, "vkDestroySemaphoreSciSyncPoolNV")); #endif #if (defined(VK_EXT_display_control)) - fp_vkDisplayPowerControlEXT = reinterpret_cast(procAddr(device, "vkDisplayPowerControlEXT")); + fp_vkDisplayPowerControlEXT = + reinterpret_cast(procAddr(device, "vkDisplayPowerControlEXT")); #endif #if (defined(VK_EXT_display_control)) - 
fp_vkRegisterDeviceEventEXT = reinterpret_cast(procAddr(device, "vkRegisterDeviceEventEXT")); + fp_vkRegisterDeviceEventEXT = + reinterpret_cast(procAddr(device, "vkRegisterDeviceEventEXT")); #endif #if (defined(VK_EXT_display_control)) - fp_vkRegisterDisplayEventEXT = reinterpret_cast(procAddr(device, "vkRegisterDisplayEventEXT")); + fp_vkRegisterDisplayEventEXT = + reinterpret_cast(procAddr(device, "vkRegisterDisplayEventEXT")); #endif #if (defined(VK_EXT_display_control)) - fp_vkGetSwapchainCounterEXT = reinterpret_cast(procAddr(device, "vkGetSwapchainCounterEXT")); + fp_vkGetSwapchainCounterEXT = + reinterpret_cast(procAddr(device, "vkGetSwapchainCounterEXT")); #endif #if (defined(VK_VERSION_1_1)) - fp_vkGetDeviceGroupPeerMemoryFeatures = reinterpret_cast(procAddr(device, "vkGetDeviceGroupPeerMemoryFeatures")); + fp_vkGetDeviceGroupPeerMemoryFeatures = reinterpret_cast( + procAddr(device, "vkGetDeviceGroupPeerMemoryFeatures")); #endif #if (defined(VK_VERSION_1_1)) fp_vkBindBufferMemory2 = reinterpret_cast(procAddr(device, "vkBindBufferMemory2")); @@ -1713,109 +2351,141 @@ struct DispatchTable { fp_vkCmdSetDeviceMask = reinterpret_cast(procAddr(device, "vkCmdSetDeviceMask")); #endif #if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group)) - fp_vkGetDeviceGroupPresentCapabilitiesKHR = reinterpret_cast(procAddr(device, "vkGetDeviceGroupPresentCapabilitiesKHR")); + fp_vkGetDeviceGroupPresentCapabilitiesKHR = reinterpret_cast( + procAddr(device, "vkGetDeviceGroupPresentCapabilitiesKHR")); #endif #if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group)) - fp_vkGetDeviceGroupSurfacePresentModesKHR = reinterpret_cast(procAddr(device, "vkGetDeviceGroupSurfacePresentModesKHR")); + fp_vkGetDeviceGroupSurfacePresentModesKHR = reinterpret_cast( + procAddr(device, "vkGetDeviceGroupSurfacePresentModesKHR")); #endif #if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group)) - fp_vkAcquireNextImage2KHR = reinterpret_cast(procAddr(device, "vkAcquireNextImage2KHR")); + fp_vkAcquireNextImage2KHR = + reinterpret_cast(procAddr(device, "vkAcquireNextImage2KHR")); #endif #if (defined(VK_VERSION_1_1)) fp_vkCmdDispatchBase = reinterpret_cast(procAddr(device, "vkCmdDispatchBase")); #endif #if (defined(VK_VERSION_1_1)) - fp_vkCreateDescriptorUpdateTemplate = reinterpret_cast(procAddr(device, "vkCreateDescriptorUpdateTemplate")); + fp_vkCreateDescriptorUpdateTemplate = reinterpret_cast( + procAddr(device, "vkCreateDescriptorUpdateTemplate")); #endif #if (defined(VK_VERSION_1_1)) - fp_vkDestroyDescriptorUpdateTemplate = reinterpret_cast(procAddr(device, "vkDestroyDescriptorUpdateTemplate")); + fp_vkDestroyDescriptorUpdateTemplate = reinterpret_cast( + procAddr(device, "vkDestroyDescriptorUpdateTemplate")); #endif #if (defined(VK_VERSION_1_1)) - fp_vkUpdateDescriptorSetWithTemplate = reinterpret_cast(procAddr(device, "vkUpdateDescriptorSetWithTemplate")); + fp_vkUpdateDescriptorSetWithTemplate = reinterpret_cast( + procAddr(device, "vkUpdateDescriptorSetWithTemplate")); #endif #if (defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_descriptor_update_template)) - fp_vkCmdPushDescriptorSetWithTemplateKHR = reinterpret_cast(procAddr(device, "vkCmdPushDescriptorSetWithTemplateKHR")); + fp_vkCmdPushDescriptorSetWithTemplateKHR = reinterpret_cast( + procAddr(device, "vkCmdPushDescriptorSetWithTemplateKHR")); #endif #if (defined(VK_EXT_hdr_metadata)) fp_vkSetHdrMetadataEXT = reinterpret_cast(procAddr(device, "vkSetHdrMetadataEXT")); #endif #if (defined(VK_KHR_shared_presentable_image)) - 
fp_vkGetSwapchainStatusKHR = reinterpret_cast(procAddr(device, "vkGetSwapchainStatusKHR")); + fp_vkGetSwapchainStatusKHR = + reinterpret_cast(procAddr(device, "vkGetSwapchainStatusKHR")); #endif #if (defined(VK_GOOGLE_display_timing)) - fp_vkGetRefreshCycleDurationGOOGLE = reinterpret_cast(procAddr(device, "vkGetRefreshCycleDurationGOOGLE")); + fp_vkGetRefreshCycleDurationGOOGLE = + reinterpret_cast(procAddr(device, "vkGetRefreshCycleDurationGOOGLE")); #endif #if (defined(VK_GOOGLE_display_timing)) - fp_vkGetPastPresentationTimingGOOGLE = reinterpret_cast(procAddr(device, "vkGetPastPresentationTimingGOOGLE")); + fp_vkGetPastPresentationTimingGOOGLE = reinterpret_cast( + procAddr(device, "vkGetPastPresentationTimingGOOGLE")); #endif #if (defined(VK_NV_clip_space_w_scaling)) - fp_vkCmdSetViewportWScalingNV = reinterpret_cast(procAddr(device, "vkCmdSetViewportWScalingNV")); + fp_vkCmdSetViewportWScalingNV = + reinterpret_cast(procAddr(device, "vkCmdSetViewportWScalingNV")); #endif #if (defined(VK_EXT_discard_rectangles)) - fp_vkCmdSetDiscardRectangleEXT = reinterpret_cast(procAddr(device, "vkCmdSetDiscardRectangleEXT")); + fp_vkCmdSetDiscardRectangleEXT = + reinterpret_cast(procAddr(device, "vkCmdSetDiscardRectangleEXT")); #endif #if ((defined(VK_EXT_discard_rectangles))) && VK_HEADER_VERSION >= 241 - fp_vkCmdSetDiscardRectangleEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetDiscardRectangleEnableEXT")); + fp_vkCmdSetDiscardRectangleEnableEXT = reinterpret_cast( + procAddr(device, "vkCmdSetDiscardRectangleEnableEXT")); #endif #if ((defined(VK_EXT_discard_rectangles))) && VK_HEADER_VERSION >= 241 - fp_vkCmdSetDiscardRectangleModeEXT = reinterpret_cast(procAddr(device, "vkCmdSetDiscardRectangleModeEXT")); + fp_vkCmdSetDiscardRectangleModeEXT = + reinterpret_cast(procAddr(device, "vkCmdSetDiscardRectangleModeEXT")); #endif #if (defined(VK_EXT_sample_locations)) - fp_vkCmdSetSampleLocationsEXT = reinterpret_cast(procAddr(device, "vkCmdSetSampleLocationsEXT")); + fp_vkCmdSetSampleLocationsEXT = + reinterpret_cast(procAddr(device, "vkCmdSetSampleLocationsEXT")); #endif #if (defined(VK_VERSION_1_1)) - fp_vkGetBufferMemoryRequirements2 = reinterpret_cast(procAddr(device, "vkGetBufferMemoryRequirements2")); + fp_vkGetBufferMemoryRequirements2 = + reinterpret_cast(procAddr(device, "vkGetBufferMemoryRequirements2")); #endif #if (defined(VK_VERSION_1_1)) - fp_vkGetImageMemoryRequirements2 = reinterpret_cast(procAddr(device, "vkGetImageMemoryRequirements2")); + fp_vkGetImageMemoryRequirements2 = + reinterpret_cast(procAddr(device, "vkGetImageMemoryRequirements2")); #endif #if (defined(VK_VERSION_1_1)) - fp_vkGetImageSparseMemoryRequirements2 = reinterpret_cast(procAddr(device, "vkGetImageSparseMemoryRequirements2")); + fp_vkGetImageSparseMemoryRequirements2 = reinterpret_cast( + procAddr(device, "vkGetImageSparseMemoryRequirements2")); #endif #if (defined(VK_VERSION_1_3)) - fp_vkGetDeviceBufferMemoryRequirements = reinterpret_cast(procAddr(device, "vkGetDeviceBufferMemoryRequirements")); + fp_vkGetDeviceBufferMemoryRequirements = reinterpret_cast( + procAddr(device, "vkGetDeviceBufferMemoryRequirements")); #endif #if (defined(VK_VERSION_1_3)) - fp_vkGetDeviceImageMemoryRequirements = reinterpret_cast(procAddr(device, "vkGetDeviceImageMemoryRequirements")); + fp_vkGetDeviceImageMemoryRequirements = reinterpret_cast( + procAddr(device, "vkGetDeviceImageMemoryRequirements")); #endif #if (defined(VK_VERSION_1_3)) - fp_vkGetDeviceImageSparseMemoryRequirements = reinterpret_cast(procAddr(device, 
"vkGetDeviceImageSparseMemoryRequirements")); + fp_vkGetDeviceImageSparseMemoryRequirements = reinterpret_cast( + procAddr(device, "vkGetDeviceImageSparseMemoryRequirements")); #endif #if (defined(VK_VERSION_1_1)) - fp_vkCreateSamplerYcbcrConversion = reinterpret_cast(procAddr(device, "vkCreateSamplerYcbcrConversion")); + fp_vkCreateSamplerYcbcrConversion = + reinterpret_cast(procAddr(device, "vkCreateSamplerYcbcrConversion")); #endif #if (defined(VK_VERSION_1_1)) - fp_vkDestroySamplerYcbcrConversion = reinterpret_cast(procAddr(device, "vkDestroySamplerYcbcrConversion")); + fp_vkDestroySamplerYcbcrConversion = + reinterpret_cast(procAddr(device, "vkDestroySamplerYcbcrConversion")); #endif #if (defined(VK_VERSION_1_1)) fp_vkGetDeviceQueue2 = reinterpret_cast(procAddr(device, "vkGetDeviceQueue2")); #endif #if (defined(VK_EXT_validation_cache)) - fp_vkCreateValidationCacheEXT = reinterpret_cast(procAddr(device, "vkCreateValidationCacheEXT")); + fp_vkCreateValidationCacheEXT = + reinterpret_cast(procAddr(device, "vkCreateValidationCacheEXT")); #endif #if (defined(VK_EXT_validation_cache)) - fp_vkDestroyValidationCacheEXT = reinterpret_cast(procAddr(device, "vkDestroyValidationCacheEXT")); + fp_vkDestroyValidationCacheEXT = + reinterpret_cast(procAddr(device, "vkDestroyValidationCacheEXT")); #endif #if (defined(VK_EXT_validation_cache)) - fp_vkGetValidationCacheDataEXT = reinterpret_cast(procAddr(device, "vkGetValidationCacheDataEXT")); + fp_vkGetValidationCacheDataEXT = + reinterpret_cast(procAddr(device, "vkGetValidationCacheDataEXT")); #endif #if (defined(VK_EXT_validation_cache)) - fp_vkMergeValidationCachesEXT = reinterpret_cast(procAddr(device, "vkMergeValidationCachesEXT")); + fp_vkMergeValidationCachesEXT = + reinterpret_cast(procAddr(device, "vkMergeValidationCachesEXT")); #endif #if (defined(VK_VERSION_1_1)) - fp_vkGetDescriptorSetLayoutSupport = reinterpret_cast(procAddr(device, "vkGetDescriptorSetLayoutSupport")); + fp_vkGetDescriptorSetLayoutSupport = + reinterpret_cast(procAddr(device, "vkGetDescriptorSetLayoutSupport")); #endif #if (defined(VK_ANDROID_native_buffer)) - fp_vkGetSwapchainGrallocUsageANDROID = reinterpret_cast(procAddr(device, "vkGetSwapchainGrallocUsageANDROID")); + fp_vkGetSwapchainGrallocUsageANDROID = reinterpret_cast( + procAddr(device, "vkGetSwapchainGrallocUsageANDROID")); #endif #if (defined(VK_ANDROID_native_buffer)) - fp_vkGetSwapchainGrallocUsage2ANDROID = reinterpret_cast(procAddr(device, "vkGetSwapchainGrallocUsage2ANDROID")); + fp_vkGetSwapchainGrallocUsage2ANDROID = reinterpret_cast( + procAddr(device, "vkGetSwapchainGrallocUsage2ANDROID")); #endif #if (defined(VK_ANDROID_native_buffer)) - fp_vkAcquireImageANDROID = reinterpret_cast(procAddr(device, "vkAcquireImageANDROID")); + fp_vkAcquireImageANDROID = + reinterpret_cast(procAddr(device, "vkAcquireImageANDROID")); #endif #if (defined(VK_ANDROID_native_buffer)) - fp_vkQueueSignalReleaseImageANDROID = reinterpret_cast(procAddr(device, "vkQueueSignalReleaseImageANDROID")); + fp_vkQueueSignalReleaseImageANDROID = reinterpret_cast( + procAddr(device, "vkQueueSignalReleaseImageANDROID")); #endif #if (defined(VK_AMD_shader_info)) fp_vkGetShaderInfoAMD = reinterpret_cast(procAddr(device, "vkGetShaderInfoAMD")); @@ -1824,43 +2494,55 @@ struct DispatchTable { fp_vkSetLocalDimmingAMD = reinterpret_cast(procAddr(device, "vkSetLocalDimmingAMD")); #endif #if (defined(VK_KHR_calibrated_timestamps)) - fp_vkGetCalibratedTimestampsKHR = reinterpret_cast(procAddr(device, "vkGetCalibratedTimestampsKHR")); + 
fp_vkGetCalibratedTimestampsKHR = + reinterpret_cast(procAddr(device, "vkGetCalibratedTimestampsKHR")); #endif #if (defined(VK_EXT_debug_utils)) - fp_vkSetDebugUtilsObjectNameEXT = reinterpret_cast(procAddr(device, "vkSetDebugUtilsObjectNameEXT")); + fp_vkSetDebugUtilsObjectNameEXT = + reinterpret_cast(procAddr(device, "vkSetDebugUtilsObjectNameEXT")); #endif #if (defined(VK_EXT_debug_utils)) - fp_vkSetDebugUtilsObjectTagEXT = reinterpret_cast(procAddr(device, "vkSetDebugUtilsObjectTagEXT")); + fp_vkSetDebugUtilsObjectTagEXT = + reinterpret_cast(procAddr(device, "vkSetDebugUtilsObjectTagEXT")); #endif #if (defined(VK_EXT_debug_utils)) - fp_vkQueueBeginDebugUtilsLabelEXT = reinterpret_cast(procAddr(device, "vkQueueBeginDebugUtilsLabelEXT")); + fp_vkQueueBeginDebugUtilsLabelEXT = + reinterpret_cast(procAddr(device, "vkQueueBeginDebugUtilsLabelEXT")); #endif #if (defined(VK_EXT_debug_utils)) - fp_vkQueueEndDebugUtilsLabelEXT = reinterpret_cast(procAddr(device, "vkQueueEndDebugUtilsLabelEXT")); + fp_vkQueueEndDebugUtilsLabelEXT = + reinterpret_cast(procAddr(device, "vkQueueEndDebugUtilsLabelEXT")); #endif #if (defined(VK_EXT_debug_utils)) - fp_vkQueueInsertDebugUtilsLabelEXT = reinterpret_cast(procAddr(device, "vkQueueInsertDebugUtilsLabelEXT")); + fp_vkQueueInsertDebugUtilsLabelEXT = + reinterpret_cast(procAddr(device, "vkQueueInsertDebugUtilsLabelEXT")); #endif #if (defined(VK_EXT_debug_utils)) - fp_vkCmdBeginDebugUtilsLabelEXT = reinterpret_cast(procAddr(device, "vkCmdBeginDebugUtilsLabelEXT")); + fp_vkCmdBeginDebugUtilsLabelEXT = + reinterpret_cast(procAddr(device, "vkCmdBeginDebugUtilsLabelEXT")); #endif #if (defined(VK_EXT_debug_utils)) - fp_vkCmdEndDebugUtilsLabelEXT = reinterpret_cast(procAddr(device, "vkCmdEndDebugUtilsLabelEXT")); + fp_vkCmdEndDebugUtilsLabelEXT = + reinterpret_cast(procAddr(device, "vkCmdEndDebugUtilsLabelEXT")); #endif #if (defined(VK_EXT_debug_utils)) - fp_vkCmdInsertDebugUtilsLabelEXT = reinterpret_cast(procAddr(device, "vkCmdInsertDebugUtilsLabelEXT")); + fp_vkCmdInsertDebugUtilsLabelEXT = + reinterpret_cast(procAddr(device, "vkCmdInsertDebugUtilsLabelEXT")); #endif #if (defined(VK_EXT_external_memory_host)) - fp_vkGetMemoryHostPointerPropertiesEXT = reinterpret_cast(procAddr(device, "vkGetMemoryHostPointerPropertiesEXT")); + fp_vkGetMemoryHostPointerPropertiesEXT = reinterpret_cast( + procAddr(device, "vkGetMemoryHostPointerPropertiesEXT")); #endif #if (defined(VK_AMD_buffer_marker)) - fp_vkCmdWriteBufferMarkerAMD = reinterpret_cast(procAddr(device, "vkCmdWriteBufferMarkerAMD")); + fp_vkCmdWriteBufferMarkerAMD = + reinterpret_cast(procAddr(device, "vkCmdWriteBufferMarkerAMD")); #endif #if (defined(VK_VERSION_1_2)) fp_vkCreateRenderPass2 = reinterpret_cast(procAddr(device, "vkCreateRenderPass2")); #endif #if (defined(VK_VERSION_1_2)) - fp_vkCmdBeginRenderPass2 = reinterpret_cast(procAddr(device, "vkCmdBeginRenderPass2")); + fp_vkCmdBeginRenderPass2 = + reinterpret_cast(procAddr(device, "vkCmdBeginRenderPass2")); #endif #if (defined(VK_VERSION_1_2)) fp_vkCmdNextSubpass2 = reinterpret_cast(procAddr(device, "vkCmdNextSubpass2")); @@ -1869,7 +2551,8 @@ struct DispatchTable { fp_vkCmdEndRenderPass2 = reinterpret_cast(procAddr(device, "vkCmdEndRenderPass2")); #endif #if (defined(VK_VERSION_1_2)) - fp_vkGetSemaphoreCounterValue = reinterpret_cast(procAddr(device, "vkGetSemaphoreCounterValue")); + fp_vkGetSemaphoreCounterValue = + reinterpret_cast(procAddr(device, "vkGetSemaphoreCounterValue")); #endif #if (defined(VK_VERSION_1_2)) fp_vkWaitSemaphores = 
reinterpret_cast(procAddr(device, "vkWaitSemaphores")); @@ -1878,127 +2561,170 @@ struct DispatchTable { fp_vkSignalSemaphore = reinterpret_cast(procAddr(device, "vkSignalSemaphore")); #endif #if (defined(VK_ANDROID_external_memory_android_hardware_buffer)) - fp_vkGetAndroidHardwareBufferPropertiesANDROID = reinterpret_cast(procAddr(device, "vkGetAndroidHardwareBufferPropertiesANDROID")); + fp_vkGetAndroidHardwareBufferPropertiesANDROID = + reinterpret_cast( + procAddr(device, "vkGetAndroidHardwareBufferPropertiesANDROID")); #endif #if (defined(VK_ANDROID_external_memory_android_hardware_buffer)) - fp_vkGetMemoryAndroidHardwareBufferANDROID = reinterpret_cast(procAddr(device, "vkGetMemoryAndroidHardwareBufferANDROID")); + fp_vkGetMemoryAndroidHardwareBufferANDROID = reinterpret_cast( + procAddr(device, "vkGetMemoryAndroidHardwareBufferANDROID")); #endif #if (defined(VK_VERSION_1_2)) - fp_vkCmdDrawIndirectCount = reinterpret_cast(procAddr(device, "vkCmdDrawIndirectCount")); + fp_vkCmdDrawIndirectCount = + reinterpret_cast(procAddr(device, "vkCmdDrawIndirectCount")); #endif #if (defined(VK_VERSION_1_2)) - fp_vkCmdDrawIndexedIndirectCount = reinterpret_cast(procAddr(device, "vkCmdDrawIndexedIndirectCount")); + fp_vkCmdDrawIndexedIndirectCount = + reinterpret_cast(procAddr(device, "vkCmdDrawIndexedIndirectCount")); #endif #if (defined(VK_NV_device_diagnostic_checkpoints)) fp_vkCmdSetCheckpointNV = reinterpret_cast(procAddr(device, "vkCmdSetCheckpointNV")); #endif #if (defined(VK_NV_device_diagnostic_checkpoints)) - fp_vkGetQueueCheckpointDataNV = reinterpret_cast(procAddr(device, "vkGetQueueCheckpointDataNV")); + fp_vkGetQueueCheckpointDataNV = + reinterpret_cast(procAddr(device, "vkGetQueueCheckpointDataNV")); #endif #if (defined(VK_EXT_transform_feedback)) - fp_vkCmdBindTransformFeedbackBuffersEXT = reinterpret_cast(procAddr(device, "vkCmdBindTransformFeedbackBuffersEXT")); + fp_vkCmdBindTransformFeedbackBuffersEXT = reinterpret_cast( + procAddr(device, "vkCmdBindTransformFeedbackBuffersEXT")); #endif #if (defined(VK_EXT_transform_feedback)) - fp_vkCmdBeginTransformFeedbackEXT = reinterpret_cast(procAddr(device, "vkCmdBeginTransformFeedbackEXT")); + fp_vkCmdBeginTransformFeedbackEXT = + reinterpret_cast(procAddr(device, "vkCmdBeginTransformFeedbackEXT")); #endif #if (defined(VK_EXT_transform_feedback)) - fp_vkCmdEndTransformFeedbackEXT = reinterpret_cast(procAddr(device, "vkCmdEndTransformFeedbackEXT")); + fp_vkCmdEndTransformFeedbackEXT = + reinterpret_cast(procAddr(device, "vkCmdEndTransformFeedbackEXT")); #endif #if (defined(VK_EXT_transform_feedback)) - fp_vkCmdBeginQueryIndexedEXT = reinterpret_cast(procAddr(device, "vkCmdBeginQueryIndexedEXT")); + fp_vkCmdBeginQueryIndexedEXT = + reinterpret_cast(procAddr(device, "vkCmdBeginQueryIndexedEXT")); #endif #if (defined(VK_EXT_transform_feedback)) - fp_vkCmdEndQueryIndexedEXT = reinterpret_cast(procAddr(device, "vkCmdEndQueryIndexedEXT")); + fp_vkCmdEndQueryIndexedEXT = + reinterpret_cast(procAddr(device, "vkCmdEndQueryIndexedEXT")); #endif #if (defined(VK_EXT_transform_feedback)) - fp_vkCmdDrawIndirectByteCountEXT = reinterpret_cast(procAddr(device, "vkCmdDrawIndirectByteCountEXT")); + fp_vkCmdDrawIndirectByteCountEXT = + reinterpret_cast(procAddr(device, "vkCmdDrawIndirectByteCountEXT")); #endif #if (defined(VK_NV_scissor_exclusive)) - fp_vkCmdSetExclusiveScissorNV = reinterpret_cast(procAddr(device, "vkCmdSetExclusiveScissorNV")); + fp_vkCmdSetExclusiveScissorNV = + reinterpret_cast(procAddr(device, "vkCmdSetExclusiveScissorNV")); #endif 
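// Every entry in this dispatch table follows the same pattern: query the
// device-level entry point through vkGetDeviceProcAddr (received here as
// `procAddr`) and cast the returned PFN_vkVoidFunction to the matching PFN_*
// typedef, with extension- and version-specific entry points additionally
// wrapped in #if guards. The following is a minimal standalone sketch of that
// pattern, not part of this patch; `LoadDeviceFn` and `ExampleLoad` are
// illustrative names only.

#include <vulkan/vulkan.h>

template <typename PFN>
PFN LoadDeviceFn(VkDevice device, PFN_vkGetDeviceProcAddr proc_addr, const char* name)
{
    // vkGetDeviceProcAddr returns NULL for entry points that are unknown or
    // whose extension was not enabled, so callers should null-check before use.
    return reinterpret_cast<PFN>(proc_addr(device, name));
}

void ExampleLoad(VkDevice device, PFN_vkGetDeviceProcAddr proc_addr)
{
    // Core entry point: expected to resolve on any valid device.
    auto fp_vkCmdDraw = LoadDeviceFn<PFN_vkCmdDraw>(device, proc_addr, "vkCmdDraw");
    (void)fp_vkCmdDraw;

#if defined(VK_KHR_swapchain)
    // Extension entry point: compile-time guard plus run-time null check.
    auto fp_vkAcquireNextImageKHR =
        LoadDeviceFn<PFN_vkAcquireNextImageKHR>(device, proc_addr, "vkAcquireNextImageKHR");
    if (fp_vkAcquireNextImageKHR == nullptr)
    {
        // VK_KHR_swapchain was not enabled when the device was created.
    }
#endif
}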
#if ((defined(VK_NV_scissor_exclusive))) && VK_HEADER_VERSION >= 241 - fp_vkCmdSetExclusiveScissorEnableNV = reinterpret_cast(procAddr(device, "vkCmdSetExclusiveScissorEnableNV")); + fp_vkCmdSetExclusiveScissorEnableNV = reinterpret_cast( + procAddr(device, "vkCmdSetExclusiveScissorEnableNV")); #endif #if (defined(VK_NV_shading_rate_image)) - fp_vkCmdBindShadingRateImageNV = reinterpret_cast(procAddr(device, "vkCmdBindShadingRateImageNV")); + fp_vkCmdBindShadingRateImageNV = + reinterpret_cast(procAddr(device, "vkCmdBindShadingRateImageNV")); #endif #if (defined(VK_NV_shading_rate_image)) - fp_vkCmdSetViewportShadingRatePaletteNV = reinterpret_cast(procAddr(device, "vkCmdSetViewportShadingRatePaletteNV")); + fp_vkCmdSetViewportShadingRatePaletteNV = reinterpret_cast( + procAddr(device, "vkCmdSetViewportShadingRatePaletteNV")); #endif #if (defined(VK_NV_shading_rate_image)) - fp_vkCmdSetCoarseSampleOrderNV = reinterpret_cast(procAddr(device, "vkCmdSetCoarseSampleOrderNV")); + fp_vkCmdSetCoarseSampleOrderNV = + reinterpret_cast(procAddr(device, "vkCmdSetCoarseSampleOrderNV")); #endif #if (defined(VK_NV_mesh_shader)) fp_vkCmdDrawMeshTasksNV = reinterpret_cast(procAddr(device, "vkCmdDrawMeshTasksNV")); #endif #if (defined(VK_NV_mesh_shader)) - fp_vkCmdDrawMeshTasksIndirectNV = reinterpret_cast(procAddr(device, "vkCmdDrawMeshTasksIndirectNV")); + fp_vkCmdDrawMeshTasksIndirectNV = + reinterpret_cast(procAddr(device, "vkCmdDrawMeshTasksIndirectNV")); #endif #if (defined(VK_NV_mesh_shader)) - fp_vkCmdDrawMeshTasksIndirectCountNV = reinterpret_cast(procAddr(device, "vkCmdDrawMeshTasksIndirectCountNV")); + fp_vkCmdDrawMeshTasksIndirectCountNV = reinterpret_cast( + procAddr(device, "vkCmdDrawMeshTasksIndirectCountNV")); #endif #if (defined(VK_EXT_mesh_shader)) - fp_vkCmdDrawMeshTasksEXT = reinterpret_cast(procAddr(device, "vkCmdDrawMeshTasksEXT")); + fp_vkCmdDrawMeshTasksEXT = + reinterpret_cast(procAddr(device, "vkCmdDrawMeshTasksEXT")); #endif #if (defined(VK_EXT_mesh_shader)) - fp_vkCmdDrawMeshTasksIndirectEXT = reinterpret_cast(procAddr(device, "vkCmdDrawMeshTasksIndirectEXT")); + fp_vkCmdDrawMeshTasksIndirectEXT = + reinterpret_cast(procAddr(device, "vkCmdDrawMeshTasksIndirectEXT")); #endif #if (defined(VK_EXT_mesh_shader)) - fp_vkCmdDrawMeshTasksIndirectCountEXT = reinterpret_cast(procAddr(device, "vkCmdDrawMeshTasksIndirectCountEXT")); + fp_vkCmdDrawMeshTasksIndirectCountEXT = reinterpret_cast( + procAddr(device, "vkCmdDrawMeshTasksIndirectCountEXT")); #endif #if (defined(VK_NV_ray_tracing)) fp_vkCompileDeferredNV = reinterpret_cast(procAddr(device, "vkCompileDeferredNV")); #endif #if (defined(VK_NV_ray_tracing)) - fp_vkCreateAccelerationStructureNV = reinterpret_cast(procAddr(device, "vkCreateAccelerationStructureNV")); + fp_vkCreateAccelerationStructureNV = + reinterpret_cast(procAddr(device, "vkCreateAccelerationStructureNV")); #endif #if (defined(VK_HUAWEI_invocation_mask)) - fp_vkCmdBindInvocationMaskHUAWEI = reinterpret_cast(procAddr(device, "vkCmdBindInvocationMaskHUAWEI")); + fp_vkCmdBindInvocationMaskHUAWEI = + reinterpret_cast(procAddr(device, "vkCmdBindInvocationMaskHUAWEI")); #endif #if (defined(VK_KHR_acceleration_structure)) - fp_vkDestroyAccelerationStructureKHR = reinterpret_cast(procAddr(device, "vkDestroyAccelerationStructureKHR")); + fp_vkDestroyAccelerationStructureKHR = reinterpret_cast( + procAddr(device, "vkDestroyAccelerationStructureKHR")); #endif #if (defined(VK_NV_ray_tracing)) - fp_vkDestroyAccelerationStructureNV = reinterpret_cast(procAddr(device, 
"vkDestroyAccelerationStructureNV")); + fp_vkDestroyAccelerationStructureNV = reinterpret_cast( + procAddr(device, "vkDestroyAccelerationStructureNV")); #endif #if (defined(VK_NV_ray_tracing)) - fp_vkGetAccelerationStructureMemoryRequirementsNV = reinterpret_cast(procAddr(device, "vkGetAccelerationStructureMemoryRequirementsNV")); + fp_vkGetAccelerationStructureMemoryRequirementsNV = + reinterpret_cast( + procAddr(device, "vkGetAccelerationStructureMemoryRequirementsNV")); #endif #if (defined(VK_NV_ray_tracing)) - fp_vkBindAccelerationStructureMemoryNV = reinterpret_cast(procAddr(device, "vkBindAccelerationStructureMemoryNV")); + fp_vkBindAccelerationStructureMemoryNV = reinterpret_cast( + procAddr(device, "vkBindAccelerationStructureMemoryNV")); #endif #if (defined(VK_NV_ray_tracing)) - fp_vkCmdCopyAccelerationStructureNV = reinterpret_cast(procAddr(device, "vkCmdCopyAccelerationStructureNV")); + fp_vkCmdCopyAccelerationStructureNV = reinterpret_cast( + procAddr(device, "vkCmdCopyAccelerationStructureNV")); #endif #if (defined(VK_KHR_acceleration_structure)) - fp_vkCmdCopyAccelerationStructureKHR = reinterpret_cast(procAddr(device, "vkCmdCopyAccelerationStructureKHR")); + fp_vkCmdCopyAccelerationStructureKHR = reinterpret_cast( + procAddr(device, "vkCmdCopyAccelerationStructureKHR")); #endif #if (defined(VK_KHR_acceleration_structure)) - fp_vkCopyAccelerationStructureKHR = reinterpret_cast(procAddr(device, "vkCopyAccelerationStructureKHR")); + fp_vkCopyAccelerationStructureKHR = + reinterpret_cast(procAddr(device, "vkCopyAccelerationStructureKHR")); #endif #if (defined(VK_KHR_acceleration_structure)) - fp_vkCmdCopyAccelerationStructureToMemoryKHR = reinterpret_cast(procAddr(device, "vkCmdCopyAccelerationStructureToMemoryKHR")); + fp_vkCmdCopyAccelerationStructureToMemoryKHR = reinterpret_cast( + procAddr(device, "vkCmdCopyAccelerationStructureToMemoryKHR")); #endif #if (defined(VK_KHR_acceleration_structure)) - fp_vkCopyAccelerationStructureToMemoryKHR = reinterpret_cast(procAddr(device, "vkCopyAccelerationStructureToMemoryKHR")); + fp_vkCopyAccelerationStructureToMemoryKHR = reinterpret_cast( + procAddr(device, "vkCopyAccelerationStructureToMemoryKHR")); #endif #if (defined(VK_KHR_acceleration_structure)) - fp_vkCmdCopyMemoryToAccelerationStructureKHR = reinterpret_cast(procAddr(device, "vkCmdCopyMemoryToAccelerationStructureKHR")); + fp_vkCmdCopyMemoryToAccelerationStructureKHR = reinterpret_cast( + procAddr(device, "vkCmdCopyMemoryToAccelerationStructureKHR")); #endif #if (defined(VK_KHR_acceleration_structure)) - fp_vkCopyMemoryToAccelerationStructureKHR = reinterpret_cast(procAddr(device, "vkCopyMemoryToAccelerationStructureKHR")); + fp_vkCopyMemoryToAccelerationStructureKHR = reinterpret_cast( + procAddr(device, "vkCopyMemoryToAccelerationStructureKHR")); #endif #if (defined(VK_KHR_acceleration_structure)) - fp_vkCmdWriteAccelerationStructuresPropertiesKHR = reinterpret_cast(procAddr(device, "vkCmdWriteAccelerationStructuresPropertiesKHR")); + fp_vkCmdWriteAccelerationStructuresPropertiesKHR = + reinterpret_cast( + procAddr(device, "vkCmdWriteAccelerationStructuresPropertiesKHR")); #endif #if (defined(VK_NV_ray_tracing)) - fp_vkCmdWriteAccelerationStructuresPropertiesNV = reinterpret_cast(procAddr(device, "vkCmdWriteAccelerationStructuresPropertiesNV")); + fp_vkCmdWriteAccelerationStructuresPropertiesNV = + reinterpret_cast( + procAddr(device, "vkCmdWriteAccelerationStructuresPropertiesNV")); #endif #if (defined(VK_NV_ray_tracing)) - fp_vkCmdBuildAccelerationStructureNV = 
reinterpret_cast(procAddr(device, "vkCmdBuildAccelerationStructureNV")); + fp_vkCmdBuildAccelerationStructureNV = reinterpret_cast( + procAddr(device, "vkCmdBuildAccelerationStructureNV")); #endif #if (defined(VK_KHR_acceleration_structure)) - fp_vkWriteAccelerationStructuresPropertiesKHR = reinterpret_cast(procAddr(device, "vkWriteAccelerationStructuresPropertiesKHR")); + fp_vkWriteAccelerationStructuresPropertiesKHR = + reinterpret_cast( + procAddr(device, "vkWriteAccelerationStructuresPropertiesKHR")); #endif #if (defined(VK_KHR_ray_tracing_pipeline)) fp_vkCmdTraceRaysKHR = reinterpret_cast(procAddr(device, "vkCmdTraceRaysKHR")); @@ -2007,139 +2733,188 @@ struct DispatchTable { fp_vkCmdTraceRaysNV = reinterpret_cast(procAddr(device, "vkCmdTraceRaysNV")); #endif #if (defined(VK_KHR_ray_tracing_pipeline)) - fp_vkGetRayTracingShaderGroupHandlesKHR = reinterpret_cast(procAddr(device, "vkGetRayTracingShaderGroupHandlesKHR")); + fp_vkGetRayTracingShaderGroupHandlesKHR = reinterpret_cast( + procAddr(device, "vkGetRayTracingShaderGroupHandlesKHR")); #endif #if (defined(VK_KHR_ray_tracing_pipeline)) - fp_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = reinterpret_cast(procAddr(device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR")); + fp_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = + reinterpret_cast( + procAddr(device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR")); #endif #if (defined(VK_NV_ray_tracing)) - fp_vkGetAccelerationStructureHandleNV = reinterpret_cast(procAddr(device, "vkGetAccelerationStructureHandleNV")); + fp_vkGetAccelerationStructureHandleNV = reinterpret_cast( + procAddr(device, "vkGetAccelerationStructureHandleNV")); #endif #if (defined(VK_NV_ray_tracing)) - fp_vkCreateRayTracingPipelinesNV = reinterpret_cast(procAddr(device, "vkCreateRayTracingPipelinesNV")); + fp_vkCreateRayTracingPipelinesNV = + reinterpret_cast(procAddr(device, "vkCreateRayTracingPipelinesNV")); #endif #if (defined(VK_KHR_ray_tracing_pipeline)) - fp_vkCreateRayTracingPipelinesKHR = reinterpret_cast(procAddr(device, "vkCreateRayTracingPipelinesKHR")); + fp_vkCreateRayTracingPipelinesKHR = + reinterpret_cast(procAddr(device, "vkCreateRayTracingPipelinesKHR")); #endif #if (defined(VK_KHR_ray_tracing_pipeline)) - fp_vkCmdTraceRaysIndirectKHR = reinterpret_cast(procAddr(device, "vkCmdTraceRaysIndirectKHR")); + fp_vkCmdTraceRaysIndirectKHR = + reinterpret_cast(procAddr(device, "vkCmdTraceRaysIndirectKHR")); #endif #if (defined(VK_KHR_ray_tracing_maintenance1)) - fp_vkCmdTraceRaysIndirect2KHR = reinterpret_cast(procAddr(device, "vkCmdTraceRaysIndirect2KHR")); + fp_vkCmdTraceRaysIndirect2KHR = + reinterpret_cast(procAddr(device, "vkCmdTraceRaysIndirect2KHR")); #endif #if (defined(VK_KHR_acceleration_structure)) - fp_vkGetDeviceAccelerationStructureCompatibilityKHR = reinterpret_cast(procAddr(device, "vkGetDeviceAccelerationStructureCompatibilityKHR")); + fp_vkGetDeviceAccelerationStructureCompatibilityKHR = + reinterpret_cast( + procAddr(device, "vkGetDeviceAccelerationStructureCompatibilityKHR")); #endif #if (defined(VK_KHR_ray_tracing_pipeline)) - fp_vkGetRayTracingShaderGroupStackSizeKHR = reinterpret_cast(procAddr(device, "vkGetRayTracingShaderGroupStackSizeKHR")); + fp_vkGetRayTracingShaderGroupStackSizeKHR = reinterpret_cast( + procAddr(device, "vkGetRayTracingShaderGroupStackSizeKHR")); #endif #if (defined(VK_KHR_ray_tracing_pipeline)) - fp_vkCmdSetRayTracingPipelineStackSizeKHR = reinterpret_cast(procAddr(device, "vkCmdSetRayTracingPipelineStackSizeKHR")); + 
fp_vkCmdSetRayTracingPipelineStackSizeKHR = reinterpret_cast( + procAddr(device, "vkCmdSetRayTracingPipelineStackSizeKHR")); #endif #if (defined(VK_EXT_full_screen_exclusive)) - fp_vkGetDeviceGroupSurfacePresentModes2EXT = reinterpret_cast(procAddr(device, "vkGetDeviceGroupSurfacePresentModes2EXT")); + fp_vkGetDeviceGroupSurfacePresentModes2EXT = reinterpret_cast( + procAddr(device, "vkGetDeviceGroupSurfacePresentModes2EXT")); #endif #if (defined(VK_EXT_full_screen_exclusive)) - fp_vkAcquireFullScreenExclusiveModeEXT = reinterpret_cast(procAddr(device, "vkAcquireFullScreenExclusiveModeEXT")); + fp_vkAcquireFullScreenExclusiveModeEXT = reinterpret_cast( + procAddr(device, "vkAcquireFullScreenExclusiveModeEXT")); #endif #if (defined(VK_EXT_full_screen_exclusive)) - fp_vkReleaseFullScreenExclusiveModeEXT = reinterpret_cast(procAddr(device, "vkReleaseFullScreenExclusiveModeEXT")); + fp_vkReleaseFullScreenExclusiveModeEXT = reinterpret_cast( + procAddr(device, "vkReleaseFullScreenExclusiveModeEXT")); #endif #if (defined(VK_KHR_performance_query)) - fp_vkAcquireProfilingLockKHR = reinterpret_cast(procAddr(device, "vkAcquireProfilingLockKHR")); + fp_vkAcquireProfilingLockKHR = + reinterpret_cast(procAddr(device, "vkAcquireProfilingLockKHR")); #endif #if (defined(VK_KHR_performance_query)) - fp_vkReleaseProfilingLockKHR = reinterpret_cast(procAddr(device, "vkReleaseProfilingLockKHR")); + fp_vkReleaseProfilingLockKHR = + reinterpret_cast(procAddr(device, "vkReleaseProfilingLockKHR")); #endif #if (defined(VK_EXT_image_drm_format_modifier)) - fp_vkGetImageDrmFormatModifierPropertiesEXT = reinterpret_cast(procAddr(device, "vkGetImageDrmFormatModifierPropertiesEXT")); + fp_vkGetImageDrmFormatModifierPropertiesEXT = reinterpret_cast( + procAddr(device, "vkGetImageDrmFormatModifierPropertiesEXT")); #endif #if (defined(VK_VERSION_1_2)) - fp_vkGetBufferOpaqueCaptureAddress = reinterpret_cast(procAddr(device, "vkGetBufferOpaqueCaptureAddress")); + fp_vkGetBufferOpaqueCaptureAddress = + reinterpret_cast(procAddr(device, "vkGetBufferOpaqueCaptureAddress")); #endif #if (defined(VK_VERSION_1_2)) - fp_vkGetBufferDeviceAddress = reinterpret_cast(procAddr(device, "vkGetBufferDeviceAddress")); + fp_vkGetBufferDeviceAddress = + reinterpret_cast(procAddr(device, "vkGetBufferDeviceAddress")); #endif #if (defined(VK_INTEL_performance_query)) - fp_vkInitializePerformanceApiINTEL = reinterpret_cast(procAddr(device, "vkInitializePerformanceApiINTEL")); + fp_vkInitializePerformanceApiINTEL = + reinterpret_cast(procAddr(device, "vkInitializePerformanceApiINTEL")); #endif #if (defined(VK_INTEL_performance_query)) - fp_vkUninitializePerformanceApiINTEL = reinterpret_cast(procAddr(device, "vkUninitializePerformanceApiINTEL")); + fp_vkUninitializePerformanceApiINTEL = reinterpret_cast( + procAddr(device, "vkUninitializePerformanceApiINTEL")); #endif #if (defined(VK_INTEL_performance_query)) - fp_vkCmdSetPerformanceMarkerINTEL = reinterpret_cast(procAddr(device, "vkCmdSetPerformanceMarkerINTEL")); + fp_vkCmdSetPerformanceMarkerINTEL = + reinterpret_cast(procAddr(device, "vkCmdSetPerformanceMarkerINTEL")); #endif #if (defined(VK_INTEL_performance_query)) - fp_vkCmdSetPerformanceStreamMarkerINTEL = reinterpret_cast(procAddr(device, "vkCmdSetPerformanceStreamMarkerINTEL")); + fp_vkCmdSetPerformanceStreamMarkerINTEL = reinterpret_cast( + procAddr(device, "vkCmdSetPerformanceStreamMarkerINTEL")); #endif #if (defined(VK_INTEL_performance_query)) - fp_vkCmdSetPerformanceOverrideINTEL = reinterpret_cast(procAddr(device, 
"vkCmdSetPerformanceOverrideINTEL")); + fp_vkCmdSetPerformanceOverrideINTEL = reinterpret_cast( + procAddr(device, "vkCmdSetPerformanceOverrideINTEL")); #endif #if (defined(VK_INTEL_performance_query)) - fp_vkAcquirePerformanceConfigurationINTEL = reinterpret_cast(procAddr(device, "vkAcquirePerformanceConfigurationINTEL")); + fp_vkAcquirePerformanceConfigurationINTEL = reinterpret_cast( + procAddr(device, "vkAcquirePerformanceConfigurationINTEL")); #endif #if (defined(VK_INTEL_performance_query)) - fp_vkReleasePerformanceConfigurationINTEL = reinterpret_cast(procAddr(device, "vkReleasePerformanceConfigurationINTEL")); + fp_vkReleasePerformanceConfigurationINTEL = reinterpret_cast( + procAddr(device, "vkReleasePerformanceConfigurationINTEL")); #endif #if (defined(VK_INTEL_performance_query)) - fp_vkQueueSetPerformanceConfigurationINTEL = reinterpret_cast(procAddr(device, "vkQueueSetPerformanceConfigurationINTEL")); + fp_vkQueueSetPerformanceConfigurationINTEL = reinterpret_cast( + procAddr(device, "vkQueueSetPerformanceConfigurationINTEL")); #endif #if (defined(VK_INTEL_performance_query)) - fp_vkGetPerformanceParameterINTEL = reinterpret_cast(procAddr(device, "vkGetPerformanceParameterINTEL")); + fp_vkGetPerformanceParameterINTEL = + reinterpret_cast(procAddr(device, "vkGetPerformanceParameterINTEL")); #endif #if (defined(VK_VERSION_1_2)) - fp_vkGetDeviceMemoryOpaqueCaptureAddress = reinterpret_cast(procAddr(device, "vkGetDeviceMemoryOpaqueCaptureAddress")); + fp_vkGetDeviceMemoryOpaqueCaptureAddress = reinterpret_cast( + procAddr(device, "vkGetDeviceMemoryOpaqueCaptureAddress")); #endif #if (defined(VK_KHR_pipeline_executable_properties)) - fp_vkGetPipelineExecutablePropertiesKHR = reinterpret_cast(procAddr(device, "vkGetPipelineExecutablePropertiesKHR")); + fp_vkGetPipelineExecutablePropertiesKHR = reinterpret_cast( + procAddr(device, "vkGetPipelineExecutablePropertiesKHR")); #endif #if (defined(VK_KHR_pipeline_executable_properties)) - fp_vkGetPipelineExecutableStatisticsKHR = reinterpret_cast(procAddr(device, "vkGetPipelineExecutableStatisticsKHR")); + fp_vkGetPipelineExecutableStatisticsKHR = reinterpret_cast( + procAddr(device, "vkGetPipelineExecutableStatisticsKHR")); #endif #if (defined(VK_KHR_pipeline_executable_properties)) - fp_vkGetPipelineExecutableInternalRepresentationsKHR = reinterpret_cast(procAddr(device, "vkGetPipelineExecutableInternalRepresentationsKHR")); + fp_vkGetPipelineExecutableInternalRepresentationsKHR = + reinterpret_cast( + procAddr(device, "vkGetPipelineExecutableInternalRepresentationsKHR")); #endif #if (defined(VK_KHR_line_rasterization)) - fp_vkCmdSetLineStippleKHR = reinterpret_cast(procAddr(device, "vkCmdSetLineStippleKHR")); + fp_vkCmdSetLineStippleKHR = + reinterpret_cast(procAddr(device, "vkCmdSetLineStippleKHR")); #endif #if (defined(VKSC_VERSION_1_0)) fp_vkGetFaultData = reinterpret_cast(procAddr(device, "vkGetFaultData")); #endif #if (defined(VK_KHR_acceleration_structure)) - fp_vkCreateAccelerationStructureKHR = reinterpret_cast(procAddr(device, "vkCreateAccelerationStructureKHR")); + fp_vkCreateAccelerationStructureKHR = reinterpret_cast( + procAddr(device, "vkCreateAccelerationStructureKHR")); #endif #if (defined(VK_KHR_acceleration_structure)) - fp_vkCmdBuildAccelerationStructuresKHR = reinterpret_cast(procAddr(device, "vkCmdBuildAccelerationStructuresKHR")); + fp_vkCmdBuildAccelerationStructuresKHR = reinterpret_cast( + procAddr(device, "vkCmdBuildAccelerationStructuresKHR")); #endif #if (defined(VK_KHR_acceleration_structure)) - 
fp_vkCmdBuildAccelerationStructuresIndirectKHR = reinterpret_cast(procAddr(device, "vkCmdBuildAccelerationStructuresIndirectKHR")); + fp_vkCmdBuildAccelerationStructuresIndirectKHR = + reinterpret_cast( + procAddr(device, "vkCmdBuildAccelerationStructuresIndirectKHR")); #endif #if (defined(VK_KHR_acceleration_structure)) - fp_vkBuildAccelerationStructuresKHR = reinterpret_cast(procAddr(device, "vkBuildAccelerationStructuresKHR")); + fp_vkBuildAccelerationStructuresKHR = reinterpret_cast( + procAddr(device, "vkBuildAccelerationStructuresKHR")); #endif #if (defined(VK_KHR_acceleration_structure)) - fp_vkGetAccelerationStructureDeviceAddressKHR = reinterpret_cast(procAddr(device, "vkGetAccelerationStructureDeviceAddressKHR")); + fp_vkGetAccelerationStructureDeviceAddressKHR = + reinterpret_cast( + procAddr(device, "vkGetAccelerationStructureDeviceAddressKHR")); #endif #if (defined(VK_KHR_deferred_host_operations)) - fp_vkCreateDeferredOperationKHR = reinterpret_cast(procAddr(device, "vkCreateDeferredOperationKHR")); + fp_vkCreateDeferredOperationKHR = + reinterpret_cast(procAddr(device, "vkCreateDeferredOperationKHR")); #endif #if (defined(VK_KHR_deferred_host_operations)) - fp_vkDestroyDeferredOperationKHR = reinterpret_cast(procAddr(device, "vkDestroyDeferredOperationKHR")); + fp_vkDestroyDeferredOperationKHR = + reinterpret_cast(procAddr(device, "vkDestroyDeferredOperationKHR")); #endif #if (defined(VK_KHR_deferred_host_operations)) - fp_vkGetDeferredOperationMaxConcurrencyKHR = reinterpret_cast(procAddr(device, "vkGetDeferredOperationMaxConcurrencyKHR")); + fp_vkGetDeferredOperationMaxConcurrencyKHR = reinterpret_cast( + procAddr(device, "vkGetDeferredOperationMaxConcurrencyKHR")); #endif #if (defined(VK_KHR_deferred_host_operations)) - fp_vkGetDeferredOperationResultKHR = reinterpret_cast(procAddr(device, "vkGetDeferredOperationResultKHR")); + fp_vkGetDeferredOperationResultKHR = + reinterpret_cast(procAddr(device, "vkGetDeferredOperationResultKHR")); #endif #if (defined(VK_KHR_deferred_host_operations)) - fp_vkDeferredOperationJoinKHR = reinterpret_cast(procAddr(device, "vkDeferredOperationJoinKHR")); + fp_vkDeferredOperationJoinKHR = + reinterpret_cast(procAddr(device, "vkDeferredOperationJoinKHR")); #endif #if (defined(VK_NV_device_generated_commands_compute)) - fp_vkGetPipelineIndirectMemoryRequirementsNV = reinterpret_cast(procAddr(device, "vkGetPipelineIndirectMemoryRequirementsNV")); + fp_vkGetPipelineIndirectMemoryRequirementsNV = reinterpret_cast( + procAddr(device, "vkGetPipelineIndirectMemoryRequirementsNV")); #endif #if (defined(VK_NV_device_generated_commands_compute)) - fp_vkGetPipelineIndirectDeviceAddressNV = reinterpret_cast(procAddr(device, "vkGetPipelineIndirectDeviceAddressNV")); + fp_vkGetPipelineIndirectDeviceAddressNV = reinterpret_cast( + procAddr(device, "vkGetPipelineIndirectDeviceAddressNV")); #endif #if (defined(VK_AMD_anti_lag)) fp_vkAntiLagUpdateAMD = reinterpret_cast(procAddr(device, "vkAntiLagUpdateAMD")); @@ -2151,151 +2926,200 @@ struct DispatchTable { fp_vkCmdSetFrontFace = reinterpret_cast(procAddr(device, "vkCmdSetFrontFace")); #endif #if (defined(VK_VERSION_1_3)) - fp_vkCmdSetPrimitiveTopology = reinterpret_cast(procAddr(device, "vkCmdSetPrimitiveTopology")); + fp_vkCmdSetPrimitiveTopology = + reinterpret_cast(procAddr(device, "vkCmdSetPrimitiveTopology")); #endif #if (defined(VK_VERSION_1_3)) - fp_vkCmdSetViewportWithCount = reinterpret_cast(procAddr(device, "vkCmdSetViewportWithCount")); + fp_vkCmdSetViewportWithCount = + 
reinterpret_cast(procAddr(device, "vkCmdSetViewportWithCount")); #endif #if (defined(VK_VERSION_1_3)) - fp_vkCmdSetScissorWithCount = reinterpret_cast(procAddr(device, "vkCmdSetScissorWithCount")); + fp_vkCmdSetScissorWithCount = + reinterpret_cast(procAddr(device, "vkCmdSetScissorWithCount")); #endif #if (defined(VK_KHR_maintenance5)) - fp_vkCmdBindIndexBuffer2KHR = reinterpret_cast(procAddr(device, "vkCmdBindIndexBuffer2KHR")); + fp_vkCmdBindIndexBuffer2KHR = + reinterpret_cast(procAddr(device, "vkCmdBindIndexBuffer2KHR")); #endif #if (defined(VK_VERSION_1_3)) - fp_vkCmdBindVertexBuffers2 = reinterpret_cast(procAddr(device, "vkCmdBindVertexBuffers2")); + fp_vkCmdBindVertexBuffers2 = + reinterpret_cast(procAddr(device, "vkCmdBindVertexBuffers2")); #endif #if (defined(VK_VERSION_1_3)) - fp_vkCmdSetDepthTestEnable = reinterpret_cast(procAddr(device, "vkCmdSetDepthTestEnable")); + fp_vkCmdSetDepthTestEnable = + reinterpret_cast(procAddr(device, "vkCmdSetDepthTestEnable")); #endif #if (defined(VK_VERSION_1_3)) - fp_vkCmdSetDepthWriteEnable = reinterpret_cast(procAddr(device, "vkCmdSetDepthWriteEnable")); + fp_vkCmdSetDepthWriteEnable = + reinterpret_cast(procAddr(device, "vkCmdSetDepthWriteEnable")); #endif #if (defined(VK_VERSION_1_3)) - fp_vkCmdSetDepthCompareOp = reinterpret_cast(procAddr(device, "vkCmdSetDepthCompareOp")); + fp_vkCmdSetDepthCompareOp = + reinterpret_cast(procAddr(device, "vkCmdSetDepthCompareOp")); #endif #if (defined(VK_VERSION_1_3)) - fp_vkCmdSetDepthBoundsTestEnable = reinterpret_cast(procAddr(device, "vkCmdSetDepthBoundsTestEnable")); + fp_vkCmdSetDepthBoundsTestEnable = + reinterpret_cast(procAddr(device, "vkCmdSetDepthBoundsTestEnable")); #endif #if (defined(VK_VERSION_1_3)) - fp_vkCmdSetStencilTestEnable = reinterpret_cast(procAddr(device, "vkCmdSetStencilTestEnable")); + fp_vkCmdSetStencilTestEnable = + reinterpret_cast(procAddr(device, "vkCmdSetStencilTestEnable")); #endif #if (defined(VK_VERSION_1_3)) fp_vkCmdSetStencilOp = reinterpret_cast(procAddr(device, "vkCmdSetStencilOp")); #endif #if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetPatchControlPointsEXT = reinterpret_cast(procAddr(device, "vkCmdSetPatchControlPointsEXT")); + fp_vkCmdSetPatchControlPointsEXT = + reinterpret_cast(procAddr(device, "vkCmdSetPatchControlPointsEXT")); #endif #if (defined(VK_VERSION_1_3)) - fp_vkCmdSetRasterizerDiscardEnable = reinterpret_cast(procAddr(device, "vkCmdSetRasterizerDiscardEnable")); + fp_vkCmdSetRasterizerDiscardEnable = + reinterpret_cast(procAddr(device, "vkCmdSetRasterizerDiscardEnable")); #endif #if (defined(VK_VERSION_1_3)) - fp_vkCmdSetDepthBiasEnable = reinterpret_cast(procAddr(device, "vkCmdSetDepthBiasEnable")); + fp_vkCmdSetDepthBiasEnable = + reinterpret_cast(procAddr(device, "vkCmdSetDepthBiasEnable")); #endif #if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) fp_vkCmdSetLogicOpEXT = reinterpret_cast(procAddr(device, "vkCmdSetLogicOpEXT")); #endif #if (defined(VK_VERSION_1_3)) - fp_vkCmdSetPrimitiveRestartEnable = reinterpret_cast(procAddr(device, "vkCmdSetPrimitiveRestartEnable")); + fp_vkCmdSetPrimitiveRestartEnable = + reinterpret_cast(procAddr(device, "vkCmdSetPrimitiveRestartEnable")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetTessellationDomainOriginEXT = reinterpret_cast(procAddr(device, "vkCmdSetTessellationDomainOriginEXT")); + fp_vkCmdSetTessellationDomainOriginEXT = reinterpret_cast( + procAddr(device, 
"vkCmdSetTessellationDomainOriginEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetDepthClampEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetDepthClampEnableEXT")); + fp_vkCmdSetDepthClampEnableEXT = + reinterpret_cast(procAddr(device, "vkCmdSetDepthClampEnableEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetPolygonModeEXT = reinterpret_cast(procAddr(device, "vkCmdSetPolygonModeEXT")); + fp_vkCmdSetPolygonModeEXT = + reinterpret_cast(procAddr(device, "vkCmdSetPolygonModeEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetRasterizationSamplesEXT = reinterpret_cast(procAddr(device, "vkCmdSetRasterizationSamplesEXT")); + fp_vkCmdSetRasterizationSamplesEXT = + reinterpret_cast(procAddr(device, "vkCmdSetRasterizationSamplesEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetSampleMaskEXT = reinterpret_cast(procAddr(device, "vkCmdSetSampleMaskEXT")); + fp_vkCmdSetSampleMaskEXT = + reinterpret_cast(procAddr(device, "vkCmdSetSampleMaskEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetAlphaToCoverageEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetAlphaToCoverageEnableEXT")); + fp_vkCmdSetAlphaToCoverageEnableEXT = reinterpret_cast( + procAddr(device, "vkCmdSetAlphaToCoverageEnableEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetAlphaToOneEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetAlphaToOneEnableEXT")); + fp_vkCmdSetAlphaToOneEnableEXT = + reinterpret_cast(procAddr(device, "vkCmdSetAlphaToOneEnableEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetLogicOpEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetLogicOpEnableEXT")); + fp_vkCmdSetLogicOpEnableEXT = + reinterpret_cast(procAddr(device, "vkCmdSetLogicOpEnableEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetColorBlendEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetColorBlendEnableEXT")); + fp_vkCmdSetColorBlendEnableEXT = + reinterpret_cast(procAddr(device, "vkCmdSetColorBlendEnableEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetColorBlendEquationEXT = reinterpret_cast(procAddr(device, "vkCmdSetColorBlendEquationEXT")); + fp_vkCmdSetColorBlendEquationEXT = + reinterpret_cast(procAddr(device, "vkCmdSetColorBlendEquationEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetColorWriteMaskEXT = reinterpret_cast(procAddr(device, "vkCmdSetColorWriteMaskEXT")); + fp_vkCmdSetColorWriteMaskEXT = + reinterpret_cast(procAddr(device, "vkCmdSetColorWriteMaskEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetRasterizationStreamEXT = reinterpret_cast(procAddr(device, "vkCmdSetRasterizationStreamEXT")); + fp_vkCmdSetRasterizationStreamEXT = + reinterpret_cast(procAddr(device, "vkCmdSetRasterizationStreamEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetConservativeRasterizationModeEXT = reinterpret_cast(procAddr(device, "vkCmdSetConservativeRasterizationModeEXT")); + fp_vkCmdSetConservativeRasterizationModeEXT = reinterpret_cast( + 
procAddr(device, "vkCmdSetConservativeRasterizationModeEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetExtraPrimitiveOverestimationSizeEXT = reinterpret_cast(procAddr(device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT")); + fp_vkCmdSetExtraPrimitiveOverestimationSizeEXT = + reinterpret_cast( + procAddr(device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetDepthClipEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetDepthClipEnableEXT")); + fp_vkCmdSetDepthClipEnableEXT = + reinterpret_cast(procAddr(device, "vkCmdSetDepthClipEnableEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetSampleLocationsEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetSampleLocationsEnableEXT")); + fp_vkCmdSetSampleLocationsEnableEXT = reinterpret_cast( + procAddr(device, "vkCmdSetSampleLocationsEnableEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetColorBlendAdvancedEXT = reinterpret_cast(procAddr(device, "vkCmdSetColorBlendAdvancedEXT")); + fp_vkCmdSetColorBlendAdvancedEXT = + reinterpret_cast(procAddr(device, "vkCmdSetColorBlendAdvancedEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetProvokingVertexModeEXT = reinterpret_cast(procAddr(device, "vkCmdSetProvokingVertexModeEXT")); + fp_vkCmdSetProvokingVertexModeEXT = + reinterpret_cast(procAddr(device, "vkCmdSetProvokingVertexModeEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetLineRasterizationModeEXT = reinterpret_cast(procAddr(device, "vkCmdSetLineRasterizationModeEXT")); + fp_vkCmdSetLineRasterizationModeEXT = reinterpret_cast( + procAddr(device, "vkCmdSetLineRasterizationModeEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetLineStippleEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetLineStippleEnableEXT")); + fp_vkCmdSetLineStippleEnableEXT = + reinterpret_cast(procAddr(device, "vkCmdSetLineStippleEnableEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetDepthClipNegativeOneToOneEXT = reinterpret_cast(procAddr(device, "vkCmdSetDepthClipNegativeOneToOneEXT")); + fp_vkCmdSetDepthClipNegativeOneToOneEXT = reinterpret_cast( + procAddr(device, "vkCmdSetDepthClipNegativeOneToOneEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetViewportWScalingEnableNV = reinterpret_cast(procAddr(device, "vkCmdSetViewportWScalingEnableNV")); + fp_vkCmdSetViewportWScalingEnableNV = reinterpret_cast( + procAddr(device, "vkCmdSetViewportWScalingEnableNV")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetViewportSwizzleNV = reinterpret_cast(procAddr(device, "vkCmdSetViewportSwizzleNV")); + fp_vkCmdSetViewportSwizzleNV = + reinterpret_cast(procAddr(device, "vkCmdSetViewportSwizzleNV")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetCoverageToColorEnableNV = reinterpret_cast(procAddr(device, "vkCmdSetCoverageToColorEnableNV")); + fp_vkCmdSetCoverageToColorEnableNV = + reinterpret_cast(procAddr(device, "vkCmdSetCoverageToColorEnableNV")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || 
(defined(VK_EXT_shader_object)) - fp_vkCmdSetCoverageToColorLocationNV = reinterpret_cast(procAddr(device, "vkCmdSetCoverageToColorLocationNV")); + fp_vkCmdSetCoverageToColorLocationNV = reinterpret_cast( + procAddr(device, "vkCmdSetCoverageToColorLocationNV")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetCoverageModulationModeNV = reinterpret_cast(procAddr(device, "vkCmdSetCoverageModulationModeNV")); + fp_vkCmdSetCoverageModulationModeNV = reinterpret_cast( + procAddr(device, "vkCmdSetCoverageModulationModeNV")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetCoverageModulationTableEnableNV = reinterpret_cast(procAddr(device, "vkCmdSetCoverageModulationTableEnableNV")); + fp_vkCmdSetCoverageModulationTableEnableNV = reinterpret_cast( + procAddr(device, "vkCmdSetCoverageModulationTableEnableNV")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetCoverageModulationTableNV = reinterpret_cast(procAddr(device, "vkCmdSetCoverageModulationTableNV")); + fp_vkCmdSetCoverageModulationTableNV = reinterpret_cast( + procAddr(device, "vkCmdSetCoverageModulationTableNV")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetShadingRateImageEnableNV = reinterpret_cast(procAddr(device, "vkCmdSetShadingRateImageEnableNV")); + fp_vkCmdSetShadingRateImageEnableNV = reinterpret_cast( + procAddr(device, "vkCmdSetShadingRateImageEnableNV")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetCoverageReductionModeNV = reinterpret_cast(procAddr(device, "vkCmdSetCoverageReductionModeNV")); + fp_vkCmdSetCoverageReductionModeNV = + reinterpret_cast(procAddr(device, "vkCmdSetCoverageReductionModeNV")); #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetRepresentativeFragmentTestEnableNV = reinterpret_cast(procAddr(device, "vkCmdSetRepresentativeFragmentTestEnableNV")); + fp_vkCmdSetRepresentativeFragmentTestEnableNV = + reinterpret_cast( + procAddr(device, "vkCmdSetRepresentativeFragmentTestEnableNV")); #endif #if (defined(VK_VERSION_1_3)) - fp_vkCreatePrivateDataSlot = reinterpret_cast(procAddr(device, "vkCreatePrivateDataSlot")); + fp_vkCreatePrivateDataSlot = + reinterpret_cast(procAddr(device, "vkCreatePrivateDataSlot")); #endif #if (defined(VK_VERSION_1_3)) - fp_vkDestroyPrivateDataSlot = reinterpret_cast(procAddr(device, "vkDestroyPrivateDataSlot")); + fp_vkDestroyPrivateDataSlot = + reinterpret_cast(procAddr(device, "vkDestroyPrivateDataSlot")); #endif #if (defined(VK_VERSION_1_3)) fp_vkSetPrivateData = reinterpret_cast(procAddr(device, "vkSetPrivateData")); @@ -2313,31 +3137,39 @@ struct DispatchTable { fp_vkCmdBlitImage2 = reinterpret_cast(procAddr(device, "vkCmdBlitImage2")); #endif #if (defined(VK_VERSION_1_3)) - fp_vkCmdCopyBufferToImage2 = reinterpret_cast(procAddr(device, "vkCmdCopyBufferToImage2")); + fp_vkCmdCopyBufferToImage2 = + reinterpret_cast(procAddr(device, "vkCmdCopyBufferToImage2")); #endif #if (defined(VK_VERSION_1_3)) - fp_vkCmdCopyImageToBuffer2 = reinterpret_cast(procAddr(device, "vkCmdCopyImageToBuffer2")); + fp_vkCmdCopyImageToBuffer2 = + reinterpret_cast(procAddr(device, "vkCmdCopyImageToBuffer2")); #endif #if (defined(VK_VERSION_1_3)) fp_vkCmdResolveImage2 = reinterpret_cast(procAddr(device, "vkCmdResolveImage2")); #endif #if (defined(VK_KHR_object_refresh)) - 
fp_vkCmdRefreshObjectsKHR = reinterpret_cast(procAddr(device, "vkCmdRefreshObjectsKHR")); + fp_vkCmdRefreshObjectsKHR = + reinterpret_cast(procAddr(device, "vkCmdRefreshObjectsKHR")); #endif #if (defined(VK_KHR_fragment_shading_rate)) - fp_vkCmdSetFragmentShadingRateKHR = reinterpret_cast(procAddr(device, "vkCmdSetFragmentShadingRateKHR")); + fp_vkCmdSetFragmentShadingRateKHR = + reinterpret_cast(procAddr(device, "vkCmdSetFragmentShadingRateKHR")); #endif #if (defined(VK_NV_fragment_shading_rate_enums)) - fp_vkCmdSetFragmentShadingRateEnumNV = reinterpret_cast(procAddr(device, "vkCmdSetFragmentShadingRateEnumNV")); + fp_vkCmdSetFragmentShadingRateEnumNV = reinterpret_cast( + procAddr(device, "vkCmdSetFragmentShadingRateEnumNV")); #endif #if (defined(VK_KHR_acceleration_structure)) - fp_vkGetAccelerationStructureBuildSizesKHR = reinterpret_cast(procAddr(device, "vkGetAccelerationStructureBuildSizesKHR")); + fp_vkGetAccelerationStructureBuildSizesKHR = reinterpret_cast( + procAddr(device, "vkGetAccelerationStructureBuildSizesKHR")); #endif #if (defined(VK_EXT_vertex_input_dynamic_state)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetVertexInputEXT = reinterpret_cast(procAddr(device, "vkCmdSetVertexInputEXT")); + fp_vkCmdSetVertexInputEXT = + reinterpret_cast(procAddr(device, "vkCmdSetVertexInputEXT")); #endif #if (defined(VK_EXT_color_write_enable)) - fp_vkCmdSetColorWriteEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetColorWriteEnableEXT")); + fp_vkCmdSetColorWriteEnableEXT = + reinterpret_cast(procAddr(device, "vkCmdSetColorWriteEnableEXT")); #endif #if (defined(VK_VERSION_1_3)) fp_vkCmdSetEvent2 = reinterpret_cast(procAddr(device, "vkCmdSetEvent2")); @@ -2349,7 +3181,8 @@ struct DispatchTable { fp_vkCmdWaitEvents2 = reinterpret_cast(procAddr(device, "vkCmdWaitEvents2")); #endif #if (defined(VK_VERSION_1_3)) - fp_vkCmdPipelineBarrier2 = reinterpret_cast(procAddr(device, "vkCmdPipelineBarrier2")); + fp_vkCmdPipelineBarrier2 = + reinterpret_cast(procAddr(device, "vkCmdPipelineBarrier2")); #endif #if (defined(VK_VERSION_1_3)) fp_vkQueueSubmit2 = reinterpret_cast(procAddr(device, "vkQueueSubmit2")); @@ -2358,142 +3191,189 @@ struct DispatchTable { fp_vkCmdWriteTimestamp2 = reinterpret_cast(procAddr(device, "vkCmdWriteTimestamp2")); #endif #if (defined(VK_KHR_synchronization2)) - fp_vkCmdWriteBufferMarker2AMD = reinterpret_cast(procAddr(device, "vkCmdWriteBufferMarker2AMD")); + fp_vkCmdWriteBufferMarker2AMD = + reinterpret_cast(procAddr(device, "vkCmdWriteBufferMarker2AMD")); #endif #if (defined(VK_KHR_synchronization2)) - fp_vkGetQueueCheckpointData2NV = reinterpret_cast(procAddr(device, "vkGetQueueCheckpointData2NV")); + fp_vkGetQueueCheckpointData2NV = + reinterpret_cast(procAddr(device, "vkGetQueueCheckpointData2NV")); #endif #if (defined(VK_EXT_host_image_copy)) - fp_vkCopyMemoryToImageEXT = reinterpret_cast(procAddr(device, "vkCopyMemoryToImageEXT")); + fp_vkCopyMemoryToImageEXT = + reinterpret_cast(procAddr(device, "vkCopyMemoryToImageEXT")); #endif #if (defined(VK_EXT_host_image_copy)) - fp_vkCopyImageToMemoryEXT = reinterpret_cast(procAddr(device, "vkCopyImageToMemoryEXT")); + fp_vkCopyImageToMemoryEXT = + reinterpret_cast(procAddr(device, "vkCopyImageToMemoryEXT")); #endif #if (defined(VK_EXT_host_image_copy)) - fp_vkCopyImageToImageEXT = reinterpret_cast(procAddr(device, "vkCopyImageToImageEXT")); + fp_vkCopyImageToImageEXT = + reinterpret_cast(procAddr(device, "vkCopyImageToImageEXT")); #endif #if (defined(VK_EXT_host_image_copy)) - fp_vkTransitionImageLayoutEXT = 
reinterpret_cast(procAddr(device, "vkTransitionImageLayoutEXT")); + fp_vkTransitionImageLayoutEXT = + reinterpret_cast(procAddr(device, "vkTransitionImageLayoutEXT")); #endif #if (defined(VKSC_VERSION_1_0)) - fp_vkGetCommandPoolMemoryConsumption = reinterpret_cast(procAddr(device, "vkGetCommandPoolMemoryConsumption")); + fp_vkGetCommandPoolMemoryConsumption = reinterpret_cast( + procAddr(device, "vkGetCommandPoolMemoryConsumption")); #endif #if (defined(VK_KHR_video_queue)) - fp_vkCreateVideoSessionKHR = reinterpret_cast(procAddr(device, "vkCreateVideoSessionKHR")); + fp_vkCreateVideoSessionKHR = + reinterpret_cast(procAddr(device, "vkCreateVideoSessionKHR")); #endif #if (defined(VK_KHR_video_queue)) - fp_vkDestroyVideoSessionKHR = reinterpret_cast(procAddr(device, "vkDestroyVideoSessionKHR")); + fp_vkDestroyVideoSessionKHR = + reinterpret_cast(procAddr(device, "vkDestroyVideoSessionKHR")); #endif #if (defined(VK_KHR_video_queue)) - fp_vkCreateVideoSessionParametersKHR = reinterpret_cast(procAddr(device, "vkCreateVideoSessionParametersKHR")); + fp_vkCreateVideoSessionParametersKHR = reinterpret_cast( + procAddr(device, "vkCreateVideoSessionParametersKHR")); #endif #if (defined(VK_KHR_video_queue)) - fp_vkUpdateVideoSessionParametersKHR = reinterpret_cast(procAddr(device, "vkUpdateVideoSessionParametersKHR")); + fp_vkUpdateVideoSessionParametersKHR = reinterpret_cast( + procAddr(device, "vkUpdateVideoSessionParametersKHR")); #endif #if (defined(VK_KHR_video_encode_queue)) - fp_vkGetEncodedVideoSessionParametersKHR = reinterpret_cast(procAddr(device, "vkGetEncodedVideoSessionParametersKHR")); + fp_vkGetEncodedVideoSessionParametersKHR = reinterpret_cast( + procAddr(device, "vkGetEncodedVideoSessionParametersKHR")); #endif #if (defined(VK_KHR_video_queue)) - fp_vkDestroyVideoSessionParametersKHR = reinterpret_cast(procAddr(device, "vkDestroyVideoSessionParametersKHR")); + fp_vkDestroyVideoSessionParametersKHR = reinterpret_cast( + procAddr(device, "vkDestroyVideoSessionParametersKHR")); #endif #if (defined(VK_KHR_video_queue)) - fp_vkGetVideoSessionMemoryRequirementsKHR = reinterpret_cast(procAddr(device, "vkGetVideoSessionMemoryRequirementsKHR")); + fp_vkGetVideoSessionMemoryRequirementsKHR = reinterpret_cast( + procAddr(device, "vkGetVideoSessionMemoryRequirementsKHR")); #endif #if (defined(VK_KHR_video_queue)) - fp_vkBindVideoSessionMemoryKHR = reinterpret_cast(procAddr(device, "vkBindVideoSessionMemoryKHR")); + fp_vkBindVideoSessionMemoryKHR = + reinterpret_cast(procAddr(device, "vkBindVideoSessionMemoryKHR")); #endif #if (defined(VK_KHR_video_decode_queue)) fp_vkCmdDecodeVideoKHR = reinterpret_cast(procAddr(device, "vkCmdDecodeVideoKHR")); #endif #if (defined(VK_KHR_video_queue)) - fp_vkCmdBeginVideoCodingKHR = reinterpret_cast(procAddr(device, "vkCmdBeginVideoCodingKHR")); + fp_vkCmdBeginVideoCodingKHR = + reinterpret_cast(procAddr(device, "vkCmdBeginVideoCodingKHR")); #endif #if (defined(VK_KHR_video_queue)) - fp_vkCmdControlVideoCodingKHR = reinterpret_cast(procAddr(device, "vkCmdControlVideoCodingKHR")); + fp_vkCmdControlVideoCodingKHR = + reinterpret_cast(procAddr(device, "vkCmdControlVideoCodingKHR")); #endif #if (defined(VK_KHR_video_queue)) - fp_vkCmdEndVideoCodingKHR = reinterpret_cast(procAddr(device, "vkCmdEndVideoCodingKHR")); + fp_vkCmdEndVideoCodingKHR = + reinterpret_cast(procAddr(device, "vkCmdEndVideoCodingKHR")); #endif #if (defined(VK_KHR_video_encode_queue)) fp_vkCmdEncodeVideoKHR = reinterpret_cast(procAddr(device, "vkCmdEncodeVideoKHR")); #endif #if 
(defined(VK_NV_memory_decompression)) - fp_vkCmdDecompressMemoryNV = reinterpret_cast(procAddr(device, "vkCmdDecompressMemoryNV")); + fp_vkCmdDecompressMemoryNV = + reinterpret_cast(procAddr(device, "vkCmdDecompressMemoryNV")); #endif #if (defined(VK_NV_memory_decompression)) - fp_vkCmdDecompressMemoryIndirectCountNV = reinterpret_cast(procAddr(device, "vkCmdDecompressMemoryIndirectCountNV")); + fp_vkCmdDecompressMemoryIndirectCountNV = reinterpret_cast( + procAddr(device, "vkCmdDecompressMemoryIndirectCountNV")); #endif #if (defined(VK_EXT_descriptor_buffer)) - fp_vkGetDescriptorSetLayoutSizeEXT = reinterpret_cast(procAddr(device, "vkGetDescriptorSetLayoutSizeEXT")); + fp_vkGetDescriptorSetLayoutSizeEXT = + reinterpret_cast(procAddr(device, "vkGetDescriptorSetLayoutSizeEXT")); #endif #if (defined(VK_EXT_descriptor_buffer)) - fp_vkGetDescriptorSetLayoutBindingOffsetEXT = reinterpret_cast(procAddr(device, "vkGetDescriptorSetLayoutBindingOffsetEXT")); + fp_vkGetDescriptorSetLayoutBindingOffsetEXT = reinterpret_cast( + procAddr(device, "vkGetDescriptorSetLayoutBindingOffsetEXT")); #endif #if (defined(VK_EXT_descriptor_buffer)) fp_vkGetDescriptorEXT = reinterpret_cast(procAddr(device, "vkGetDescriptorEXT")); #endif #if (defined(VK_EXT_descriptor_buffer)) - fp_vkCmdBindDescriptorBuffersEXT = reinterpret_cast(procAddr(device, "vkCmdBindDescriptorBuffersEXT")); + fp_vkCmdBindDescriptorBuffersEXT = + reinterpret_cast(procAddr(device, "vkCmdBindDescriptorBuffersEXT")); #endif #if (defined(VK_EXT_descriptor_buffer)) - fp_vkCmdSetDescriptorBufferOffsetsEXT = reinterpret_cast(procAddr(device, "vkCmdSetDescriptorBufferOffsetsEXT")); + fp_vkCmdSetDescriptorBufferOffsetsEXT = reinterpret_cast( + procAddr(device, "vkCmdSetDescriptorBufferOffsetsEXT")); #endif #if (defined(VK_EXT_descriptor_buffer)) - fp_vkCmdBindDescriptorBufferEmbeddedSamplersEXT = reinterpret_cast(procAddr(device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT")); + fp_vkCmdBindDescriptorBufferEmbeddedSamplersEXT = + reinterpret_cast( + procAddr(device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT")); #endif #if (defined(VK_EXT_descriptor_buffer)) - fp_vkGetBufferOpaqueCaptureDescriptorDataEXT = reinterpret_cast(procAddr(device, "vkGetBufferOpaqueCaptureDescriptorDataEXT")); + fp_vkGetBufferOpaqueCaptureDescriptorDataEXT = reinterpret_cast( + procAddr(device, "vkGetBufferOpaqueCaptureDescriptorDataEXT")); #endif #if (defined(VK_EXT_descriptor_buffer)) - fp_vkGetImageOpaqueCaptureDescriptorDataEXT = reinterpret_cast(procAddr(device, "vkGetImageOpaqueCaptureDescriptorDataEXT")); + fp_vkGetImageOpaqueCaptureDescriptorDataEXT = reinterpret_cast( + procAddr(device, "vkGetImageOpaqueCaptureDescriptorDataEXT")); #endif #if (defined(VK_EXT_descriptor_buffer)) - fp_vkGetImageViewOpaqueCaptureDescriptorDataEXT = reinterpret_cast(procAddr(device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT")); + fp_vkGetImageViewOpaqueCaptureDescriptorDataEXT = + reinterpret_cast( + procAddr(device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT")); #endif #if (defined(VK_EXT_descriptor_buffer)) - fp_vkGetSamplerOpaqueCaptureDescriptorDataEXT = reinterpret_cast(procAddr(device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT")); + fp_vkGetSamplerOpaqueCaptureDescriptorDataEXT = + reinterpret_cast( + procAddr(device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT")); #endif #if (defined(VK_EXT_descriptor_buffer)) - fp_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = reinterpret_cast(procAddr(device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT")); 
+ fp_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = + reinterpret_cast( + procAddr(device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT")); #endif #if (defined(VK_EXT_pageable_device_local_memory)) - fp_vkSetDeviceMemoryPriorityEXT = reinterpret_cast(procAddr(device, "vkSetDeviceMemoryPriorityEXT")); + fp_vkSetDeviceMemoryPriorityEXT = + reinterpret_cast(procAddr(device, "vkSetDeviceMemoryPriorityEXT")); #endif #if (defined(VK_KHR_present_wait)) fp_vkWaitForPresentKHR = reinterpret_cast(procAddr(device, "vkWaitForPresentKHR")); #endif #if (defined(VK_FUCHSIA_buffer_collection)) - fp_vkCreateBufferCollectionFUCHSIA = reinterpret_cast(procAddr(device, "vkCreateBufferCollectionFUCHSIA")); + fp_vkCreateBufferCollectionFUCHSIA = + reinterpret_cast(procAddr(device, "vkCreateBufferCollectionFUCHSIA")); #endif #if (defined(VK_FUCHSIA_buffer_collection)) - fp_vkSetBufferCollectionBufferConstraintsFUCHSIA = reinterpret_cast(procAddr(device, "vkSetBufferCollectionBufferConstraintsFUCHSIA")); + fp_vkSetBufferCollectionBufferConstraintsFUCHSIA = + reinterpret_cast( + procAddr(device, "vkSetBufferCollectionBufferConstraintsFUCHSIA")); #endif #if (defined(VK_FUCHSIA_buffer_collection)) - fp_vkSetBufferCollectionImageConstraintsFUCHSIA = reinterpret_cast(procAddr(device, "vkSetBufferCollectionImageConstraintsFUCHSIA")); + fp_vkSetBufferCollectionImageConstraintsFUCHSIA = + reinterpret_cast( + procAddr(device, "vkSetBufferCollectionImageConstraintsFUCHSIA")); #endif #if (defined(VK_FUCHSIA_buffer_collection)) - fp_vkDestroyBufferCollectionFUCHSIA = reinterpret_cast(procAddr(device, "vkDestroyBufferCollectionFUCHSIA")); + fp_vkDestroyBufferCollectionFUCHSIA = reinterpret_cast( + procAddr(device, "vkDestroyBufferCollectionFUCHSIA")); #endif #if (defined(VK_FUCHSIA_buffer_collection)) - fp_vkGetBufferCollectionPropertiesFUCHSIA = reinterpret_cast(procAddr(device, "vkGetBufferCollectionPropertiesFUCHSIA")); + fp_vkGetBufferCollectionPropertiesFUCHSIA = reinterpret_cast( + procAddr(device, "vkGetBufferCollectionPropertiesFUCHSIA")); #endif #if (defined(VK_NV_cuda_kernel_launch)) fp_vkCreateCudaModuleNV = reinterpret_cast(procAddr(device, "vkCreateCudaModuleNV")); #endif #if (defined(VK_NV_cuda_kernel_launch)) - fp_vkGetCudaModuleCacheNV = reinterpret_cast(procAddr(device, "vkGetCudaModuleCacheNV")); + fp_vkGetCudaModuleCacheNV = + reinterpret_cast(procAddr(device, "vkGetCudaModuleCacheNV")); #endif #if (defined(VK_NV_cuda_kernel_launch)) - fp_vkCreateCudaFunctionNV = reinterpret_cast(procAddr(device, "vkCreateCudaFunctionNV")); + fp_vkCreateCudaFunctionNV = + reinterpret_cast(procAddr(device, "vkCreateCudaFunctionNV")); #endif #if (defined(VK_NV_cuda_kernel_launch)) - fp_vkDestroyCudaModuleNV = reinterpret_cast(procAddr(device, "vkDestroyCudaModuleNV")); + fp_vkDestroyCudaModuleNV = + reinterpret_cast(procAddr(device, "vkDestroyCudaModuleNV")); #endif #if (defined(VK_NV_cuda_kernel_launch)) - fp_vkDestroyCudaFunctionNV = reinterpret_cast(procAddr(device, "vkDestroyCudaFunctionNV")); + fp_vkDestroyCudaFunctionNV = + reinterpret_cast(procAddr(device, "vkDestroyCudaFunctionNV")); #endif #if (defined(VK_NV_cuda_kernel_launch)) - fp_vkCmdCudaLaunchKernelNV = reinterpret_cast(procAddr(device, "vkCmdCudaLaunchKernelNV")); + fp_vkCmdCudaLaunchKernelNV = + reinterpret_cast(procAddr(device, "vkCmdCudaLaunchKernelNV")); #endif #if (defined(VK_VERSION_1_3)) fp_vkCmdBeginRendering = reinterpret_cast(procAddr(device, "vkCmdBeginRendering")); @@ -2502,16 +3382,20 @@ struct DispatchTable { 
fp_vkCmdEndRendering = reinterpret_cast(procAddr(device, "vkCmdEndRendering")); #endif #if (defined(VK_VALVE_descriptor_set_host_mapping)) - fp_vkGetDescriptorSetLayoutHostMappingInfoVALVE = reinterpret_cast(procAddr(device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE")); + fp_vkGetDescriptorSetLayoutHostMappingInfoVALVE = + reinterpret_cast( + procAddr(device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE")); #endif #if (defined(VK_VALVE_descriptor_set_host_mapping)) - fp_vkGetDescriptorSetHostMappingVALVE = reinterpret_cast(procAddr(device, "vkGetDescriptorSetHostMappingVALVE")); + fp_vkGetDescriptorSetHostMappingVALVE = reinterpret_cast( + procAddr(device, "vkGetDescriptorSetHostMappingVALVE")); #endif #if (defined(VK_EXT_opacity_micromap)) fp_vkCreateMicromapEXT = reinterpret_cast(procAddr(device, "vkCreateMicromapEXT")); #endif #if (defined(VK_EXT_opacity_micromap)) - fp_vkCmdBuildMicromapsEXT = reinterpret_cast(procAddr(device, "vkCmdBuildMicromapsEXT")); + fp_vkCmdBuildMicromapsEXT = + reinterpret_cast(procAddr(device, "vkCmdBuildMicromapsEXT")); #endif #if (defined(VK_EXT_opacity_micromap)) fp_vkBuildMicromapsEXT = reinterpret_cast(procAddr(device, "vkBuildMicromapsEXT")); @@ -2526,73 +3410,96 @@ struct DispatchTable { fp_vkCopyMicromapEXT = reinterpret_cast(procAddr(device, "vkCopyMicromapEXT")); #endif #if (defined(VK_EXT_opacity_micromap)) - fp_vkCmdCopyMicromapToMemoryEXT = reinterpret_cast(procAddr(device, "vkCmdCopyMicromapToMemoryEXT")); + fp_vkCmdCopyMicromapToMemoryEXT = + reinterpret_cast(procAddr(device, "vkCmdCopyMicromapToMemoryEXT")); #endif #if (defined(VK_EXT_opacity_micromap)) - fp_vkCopyMicromapToMemoryEXT = reinterpret_cast(procAddr(device, "vkCopyMicromapToMemoryEXT")); + fp_vkCopyMicromapToMemoryEXT = + reinterpret_cast(procAddr(device, "vkCopyMicromapToMemoryEXT")); #endif #if (defined(VK_EXT_opacity_micromap)) - fp_vkCmdCopyMemoryToMicromapEXT = reinterpret_cast(procAddr(device, "vkCmdCopyMemoryToMicromapEXT")); + fp_vkCmdCopyMemoryToMicromapEXT = + reinterpret_cast(procAddr(device, "vkCmdCopyMemoryToMicromapEXT")); #endif #if (defined(VK_EXT_opacity_micromap)) - fp_vkCopyMemoryToMicromapEXT = reinterpret_cast(procAddr(device, "vkCopyMemoryToMicromapEXT")); + fp_vkCopyMemoryToMicromapEXT = + reinterpret_cast(procAddr(device, "vkCopyMemoryToMicromapEXT")); #endif #if (defined(VK_EXT_opacity_micromap)) - fp_vkCmdWriteMicromapsPropertiesEXT = reinterpret_cast(procAddr(device, "vkCmdWriteMicromapsPropertiesEXT")); + fp_vkCmdWriteMicromapsPropertiesEXT = reinterpret_cast( + procAddr(device, "vkCmdWriteMicromapsPropertiesEXT")); #endif #if (defined(VK_EXT_opacity_micromap)) - fp_vkWriteMicromapsPropertiesEXT = reinterpret_cast(procAddr(device, "vkWriteMicromapsPropertiesEXT")); + fp_vkWriteMicromapsPropertiesEXT = + reinterpret_cast(procAddr(device, "vkWriteMicromapsPropertiesEXT")); #endif #if (defined(VK_EXT_opacity_micromap)) - fp_vkGetDeviceMicromapCompatibilityEXT = reinterpret_cast(procAddr(device, "vkGetDeviceMicromapCompatibilityEXT")); + fp_vkGetDeviceMicromapCompatibilityEXT = reinterpret_cast( + procAddr(device, "vkGetDeviceMicromapCompatibilityEXT")); #endif #if (defined(VK_EXT_opacity_micromap)) - fp_vkGetMicromapBuildSizesEXT = reinterpret_cast(procAddr(device, "vkGetMicromapBuildSizesEXT")); + fp_vkGetMicromapBuildSizesEXT = + reinterpret_cast(procAddr(device, "vkGetMicromapBuildSizesEXT")); #endif #if (defined(VK_EXT_shader_module_identifier)) - fp_vkGetShaderModuleIdentifierEXT = reinterpret_cast(procAddr(device, 
"vkGetShaderModuleIdentifierEXT")); + fp_vkGetShaderModuleIdentifierEXT = + reinterpret_cast(procAddr(device, "vkGetShaderModuleIdentifierEXT")); #endif #if (defined(VK_EXT_shader_module_identifier)) - fp_vkGetShaderModuleCreateInfoIdentifierEXT = reinterpret_cast(procAddr(device, "vkGetShaderModuleCreateInfoIdentifierEXT")); + fp_vkGetShaderModuleCreateInfoIdentifierEXT = reinterpret_cast( + procAddr(device, "vkGetShaderModuleCreateInfoIdentifierEXT")); #endif #if (defined(VK_KHR_maintenance5)) - fp_vkGetImageSubresourceLayout2KHR = reinterpret_cast(procAddr(device, "vkGetImageSubresourceLayout2KHR")); + fp_vkGetImageSubresourceLayout2KHR = + reinterpret_cast(procAddr(device, "vkGetImageSubresourceLayout2KHR")); #endif #if (defined(VK_EXT_pipeline_properties)) - fp_vkGetPipelinePropertiesEXT = reinterpret_cast(procAddr(device, "vkGetPipelinePropertiesEXT")); + fp_vkGetPipelinePropertiesEXT = + reinterpret_cast(procAddr(device, "vkGetPipelinePropertiesEXT")); #endif #if (defined(VK_EXT_metal_objects)) - fp_vkExportMetalObjectsEXT = reinterpret_cast(procAddr(device, "vkExportMetalObjectsEXT")); + fp_vkExportMetalObjectsEXT = + reinterpret_cast(procAddr(device, "vkExportMetalObjectsEXT")); #endif #if (defined(VK_QCOM_tile_properties)) - fp_vkGetFramebufferTilePropertiesQCOM = reinterpret_cast(procAddr(device, "vkGetFramebufferTilePropertiesQCOM")); + fp_vkGetFramebufferTilePropertiesQCOM = reinterpret_cast( + procAddr(device, "vkGetFramebufferTilePropertiesQCOM")); #endif #if (defined(VK_QCOM_tile_properties)) - fp_vkGetDynamicRenderingTilePropertiesQCOM = reinterpret_cast(procAddr(device, "vkGetDynamicRenderingTilePropertiesQCOM")); + fp_vkGetDynamicRenderingTilePropertiesQCOM = reinterpret_cast( + procAddr(device, "vkGetDynamicRenderingTilePropertiesQCOM")); #endif #if (defined(VK_NV_optical_flow)) - fp_vkCreateOpticalFlowSessionNV = reinterpret_cast(procAddr(device, "vkCreateOpticalFlowSessionNV")); + fp_vkCreateOpticalFlowSessionNV = + reinterpret_cast(procAddr(device, "vkCreateOpticalFlowSessionNV")); #endif #if (defined(VK_NV_optical_flow)) - fp_vkDestroyOpticalFlowSessionNV = reinterpret_cast(procAddr(device, "vkDestroyOpticalFlowSessionNV")); + fp_vkDestroyOpticalFlowSessionNV = + reinterpret_cast(procAddr(device, "vkDestroyOpticalFlowSessionNV")); #endif #if (defined(VK_NV_optical_flow)) - fp_vkBindOpticalFlowSessionImageNV = reinterpret_cast(procAddr(device, "vkBindOpticalFlowSessionImageNV")); + fp_vkBindOpticalFlowSessionImageNV = + reinterpret_cast(procAddr(device, "vkBindOpticalFlowSessionImageNV")); #endif #if (defined(VK_NV_optical_flow)) - fp_vkCmdOpticalFlowExecuteNV = reinterpret_cast(procAddr(device, "vkCmdOpticalFlowExecuteNV")); + fp_vkCmdOpticalFlowExecuteNV = + reinterpret_cast(procAddr(device, "vkCmdOpticalFlowExecuteNV")); #endif #if (defined(VK_EXT_device_fault)) - fp_vkGetDeviceFaultInfoEXT = reinterpret_cast(procAddr(device, "vkGetDeviceFaultInfoEXT")); + fp_vkGetDeviceFaultInfoEXT = + reinterpret_cast(procAddr(device, "vkGetDeviceFaultInfoEXT")); #endif #if (defined(VK_EXT_depth_bias_control)) - fp_vkCmdSetDepthBias2EXT = reinterpret_cast(procAddr(device, "vkCmdSetDepthBias2EXT")); + fp_vkCmdSetDepthBias2EXT = + reinterpret_cast(procAddr(device, "vkCmdSetDepthBias2EXT")); #endif #if (defined(VK_EXT_swapchain_maintenance1)) - fp_vkReleaseSwapchainImagesEXT = reinterpret_cast(procAddr(device, "vkReleaseSwapchainImagesEXT")); + fp_vkReleaseSwapchainImagesEXT = + reinterpret_cast(procAddr(device, "vkReleaseSwapchainImagesEXT")); #endif #if 
(defined(VK_KHR_maintenance5)) - fp_vkGetDeviceImageSubresourceLayoutKHR = reinterpret_cast(procAddr(device, "vkGetDeviceImageSubresourceLayoutKHR")); + fp_vkGetDeviceImageSubresourceLayoutKHR = reinterpret_cast( + procAddr(device, "vkGetDeviceImageSubresourceLayoutKHR")); #endif #if (defined(VK_KHR_map_memory2)) fp_vkMapMemory2KHR = reinterpret_cast(procAddr(device, "vkMapMemory2KHR")); @@ -2607,55 +3514,73 @@ struct DispatchTable { fp_vkDestroyShaderEXT = reinterpret_cast(procAddr(device, "vkDestroyShaderEXT")); #endif #if (defined(VK_EXT_shader_object)) - fp_vkGetShaderBinaryDataEXT = reinterpret_cast(procAddr(device, "vkGetShaderBinaryDataEXT")); + fp_vkGetShaderBinaryDataEXT = + reinterpret_cast(procAddr(device, "vkGetShaderBinaryDataEXT")); #endif #if (defined(VK_EXT_shader_object)) fp_vkCmdBindShadersEXT = reinterpret_cast(procAddr(device, "vkCmdBindShadersEXT")); #endif #if (defined(VK_QNX_external_memory_screen_buffer)) - fp_vkGetScreenBufferPropertiesQNX = reinterpret_cast(procAddr(device, "vkGetScreenBufferPropertiesQNX")); + fp_vkGetScreenBufferPropertiesQNX = + reinterpret_cast(procAddr(device, "vkGetScreenBufferPropertiesQNX")); #endif #if (defined(VK_AMDX_shader_enqueue)) - fp_vkGetExecutionGraphPipelineScratchSizeAMDX = reinterpret_cast(procAddr(device, "vkGetExecutionGraphPipelineScratchSizeAMDX")); + fp_vkGetExecutionGraphPipelineScratchSizeAMDX = + reinterpret_cast( + procAddr(device, "vkGetExecutionGraphPipelineScratchSizeAMDX")); #endif #if (defined(VK_AMDX_shader_enqueue)) - fp_vkGetExecutionGraphPipelineNodeIndexAMDX = reinterpret_cast(procAddr(device, "vkGetExecutionGraphPipelineNodeIndexAMDX")); + fp_vkGetExecutionGraphPipelineNodeIndexAMDX = reinterpret_cast( + procAddr(device, "vkGetExecutionGraphPipelineNodeIndexAMDX")); #endif #if (defined(VK_AMDX_shader_enqueue)) - fp_vkCreateExecutionGraphPipelinesAMDX = reinterpret_cast(procAddr(device, "vkCreateExecutionGraphPipelinesAMDX")); + fp_vkCreateExecutionGraphPipelinesAMDX = reinterpret_cast( + procAddr(device, "vkCreateExecutionGraphPipelinesAMDX")); #endif #if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 - fp_vkCmdInitializeGraphScratchMemoryAMDX = reinterpret_cast(procAddr(device, "vkCmdInitializeGraphScratchMemoryAMDX")); + fp_vkCmdInitializeGraphScratchMemoryAMDX = reinterpret_cast( + procAddr(device, "vkCmdInitializeGraphScratchMemoryAMDX")); #endif #if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 - fp_vkCmdDispatchGraphAMDX = reinterpret_cast(procAddr(device, "vkCmdDispatchGraphAMDX")); + fp_vkCmdDispatchGraphAMDX = + reinterpret_cast(procAddr(device, "vkCmdDispatchGraphAMDX")); #endif #if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 - fp_vkCmdDispatchGraphIndirectAMDX = reinterpret_cast(procAddr(device, "vkCmdDispatchGraphIndirectAMDX")); + fp_vkCmdDispatchGraphIndirectAMDX = + reinterpret_cast(procAddr(device, "vkCmdDispatchGraphIndirectAMDX")); #endif #if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 - fp_vkCmdDispatchGraphIndirectCountAMDX = reinterpret_cast(procAddr(device, "vkCmdDispatchGraphIndirectCountAMDX")); + fp_vkCmdDispatchGraphIndirectCountAMDX = reinterpret_cast( + procAddr(device, "vkCmdDispatchGraphIndirectCountAMDX")); #endif #if (defined(VK_KHR_maintenance6)) - fp_vkCmdBindDescriptorSets2KHR = reinterpret_cast(procAddr(device, "vkCmdBindDescriptorSets2KHR")); + fp_vkCmdBindDescriptorSets2KHR = + reinterpret_cast(procAddr(device, "vkCmdBindDescriptorSets2KHR")); #endif #if (defined(VK_KHR_maintenance6)) - 
fp_vkCmdPushConstants2KHR = reinterpret_cast(procAddr(device, "vkCmdPushConstants2KHR")); + fp_vkCmdPushConstants2KHR = + reinterpret_cast(procAddr(device, "vkCmdPushConstants2KHR")); #endif #if (defined(VK_KHR_maintenance6)) - fp_vkCmdPushDescriptorSet2KHR = reinterpret_cast(procAddr(device, "vkCmdPushDescriptorSet2KHR")); + fp_vkCmdPushDescriptorSet2KHR = + reinterpret_cast(procAddr(device, "vkCmdPushDescriptorSet2KHR")); #endif #if (defined(VK_KHR_maintenance6)) - fp_vkCmdPushDescriptorSetWithTemplate2KHR = reinterpret_cast(procAddr(device, "vkCmdPushDescriptorSetWithTemplate2KHR")); + fp_vkCmdPushDescriptorSetWithTemplate2KHR = reinterpret_cast( + procAddr(device, "vkCmdPushDescriptorSetWithTemplate2KHR")); #endif #if (defined(VK_KHR_maintenance6)) - fp_vkCmdSetDescriptorBufferOffsets2EXT = reinterpret_cast(procAddr(device, "vkCmdSetDescriptorBufferOffsets2EXT")); + fp_vkCmdSetDescriptorBufferOffsets2EXT = reinterpret_cast( + procAddr(device, "vkCmdSetDescriptorBufferOffsets2EXT")); #endif #if (defined(VK_KHR_maintenance6)) - fp_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = reinterpret_cast(procAddr(device, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT")); + fp_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = + reinterpret_cast( + procAddr(device, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT")); #endif #if (defined(VK_NV_low_latency2)) - fp_vkSetLatencySleepModeNV = reinterpret_cast(procAddr(device, "vkSetLatencySleepModeNV")); + fp_vkSetLatencySleepModeNV = + reinterpret_cast(procAddr(device, "vkSetLatencySleepModeNV")); #endif #if (defined(VK_NV_low_latency2)) fp_vkLatencySleepNV = reinterpret_cast(procAddr(device, "vkLatencySleepNV")); @@ -2664,19 +3589,25 @@ struct DispatchTable { fp_vkSetLatencyMarkerNV = reinterpret_cast(procAddr(device, "vkSetLatencyMarkerNV")); #endif #if ((defined(VK_NV_low_latency2))) && VK_HEADER_VERSION >= 271 - fp_vkGetLatencyTimingsNV = reinterpret_cast(procAddr(device, "vkGetLatencyTimingsNV")); + fp_vkGetLatencyTimingsNV = + reinterpret_cast(procAddr(device, "vkGetLatencyTimingsNV")); #endif #if (defined(VK_NV_low_latency2)) - fp_vkQueueNotifyOutOfBandNV = reinterpret_cast(procAddr(device, "vkQueueNotifyOutOfBandNV")); + fp_vkQueueNotifyOutOfBandNV = + reinterpret_cast(procAddr(device, "vkQueueNotifyOutOfBandNV")); #endif #if (defined(VK_KHR_dynamic_rendering_local_read)) - fp_vkCmdSetRenderingAttachmentLocationsKHR = reinterpret_cast(procAddr(device, "vkCmdSetRenderingAttachmentLocationsKHR")); + fp_vkCmdSetRenderingAttachmentLocationsKHR = reinterpret_cast( + procAddr(device, "vkCmdSetRenderingAttachmentLocationsKHR")); #endif #if (defined(VK_KHR_dynamic_rendering_local_read)) - fp_vkCmdSetRenderingInputAttachmentIndicesKHR = reinterpret_cast(procAddr(device, "vkCmdSetRenderingInputAttachmentIndicesKHR")); + fp_vkCmdSetRenderingInputAttachmentIndicesKHR = + reinterpret_cast( + procAddr(device, "vkCmdSetRenderingInputAttachmentIndicesKHR")); #endif #if (defined(VK_EXT_shader_object)) || (defined(VK_EXT_depth_clamp_control)) - fp_vkCmdSetDepthClampRangeEXT = reinterpret_cast(procAddr(device, "vkCmdSetDepthClampRangeEXT")); + fp_vkCmdSetDepthClampRangeEXT = + reinterpret_cast(procAddr(device, "vkCmdSetDepthClampRangeEXT")); #endif #if (defined(VK_EXT_host_query_reset)) fp_vkResetQueryPoolEXT = reinterpret_cast(procAddr(device, "vkResetQueryPoolEXT")); @@ -2685,73 +3616,95 @@ struct DispatchTable { fp_vkTrimCommandPoolKHR = reinterpret_cast(procAddr(device, "vkTrimCommandPoolKHR")); #endif #if (defined(VK_KHR_device_group)) - 
fp_vkGetDeviceGroupPeerMemoryFeaturesKHR = reinterpret_cast(procAddr(device, "vkGetDeviceGroupPeerMemoryFeaturesKHR")); + fp_vkGetDeviceGroupPeerMemoryFeaturesKHR = reinterpret_cast( + procAddr(device, "vkGetDeviceGroupPeerMemoryFeaturesKHR")); #endif #if (defined(VK_KHR_bind_memory2)) - fp_vkBindBufferMemory2KHR = reinterpret_cast(procAddr(device, "vkBindBufferMemory2KHR")); + fp_vkBindBufferMemory2KHR = + reinterpret_cast(procAddr(device, "vkBindBufferMemory2KHR")); #endif #if (defined(VK_KHR_bind_memory2)) - fp_vkBindImageMemory2KHR = reinterpret_cast(procAddr(device, "vkBindImageMemory2KHR")); + fp_vkBindImageMemory2KHR = + reinterpret_cast(procAddr(device, "vkBindImageMemory2KHR")); #endif #if (defined(VK_KHR_device_group)) - fp_vkCmdSetDeviceMaskKHR = reinterpret_cast(procAddr(device, "vkCmdSetDeviceMaskKHR")); + fp_vkCmdSetDeviceMaskKHR = + reinterpret_cast(procAddr(device, "vkCmdSetDeviceMaskKHR")); #endif #if (defined(VK_KHR_device_group)) fp_vkCmdDispatchBaseKHR = reinterpret_cast(procAddr(device, "vkCmdDispatchBaseKHR")); #endif #if (defined(VK_KHR_descriptor_update_template)) - fp_vkCreateDescriptorUpdateTemplateKHR = reinterpret_cast(procAddr(device, "vkCreateDescriptorUpdateTemplateKHR")); + fp_vkCreateDescriptorUpdateTemplateKHR = reinterpret_cast( + procAddr(device, "vkCreateDescriptorUpdateTemplateKHR")); #endif #if (defined(VK_KHR_descriptor_update_template)) - fp_vkDestroyDescriptorUpdateTemplateKHR = reinterpret_cast(procAddr(device, "vkDestroyDescriptorUpdateTemplateKHR")); + fp_vkDestroyDescriptorUpdateTemplateKHR = reinterpret_cast( + procAddr(device, "vkDestroyDescriptorUpdateTemplateKHR")); #endif #if (defined(VK_KHR_descriptor_update_template)) - fp_vkUpdateDescriptorSetWithTemplateKHR = reinterpret_cast(procAddr(device, "vkUpdateDescriptorSetWithTemplateKHR")); + fp_vkUpdateDescriptorSetWithTemplateKHR = reinterpret_cast( + procAddr(device, "vkUpdateDescriptorSetWithTemplateKHR")); #endif #if (defined(VK_KHR_get_memory_requirements2)) - fp_vkGetBufferMemoryRequirements2KHR = reinterpret_cast(procAddr(device, "vkGetBufferMemoryRequirements2KHR")); + fp_vkGetBufferMemoryRequirements2KHR = reinterpret_cast( + procAddr(device, "vkGetBufferMemoryRequirements2KHR")); #endif #if (defined(VK_KHR_get_memory_requirements2)) - fp_vkGetImageMemoryRequirements2KHR = reinterpret_cast(procAddr(device, "vkGetImageMemoryRequirements2KHR")); + fp_vkGetImageMemoryRequirements2KHR = reinterpret_cast( + procAddr(device, "vkGetImageMemoryRequirements2KHR")); #endif #if (defined(VK_KHR_get_memory_requirements2)) - fp_vkGetImageSparseMemoryRequirements2KHR = reinterpret_cast(procAddr(device, "vkGetImageSparseMemoryRequirements2KHR")); + fp_vkGetImageSparseMemoryRequirements2KHR = reinterpret_cast( + procAddr(device, "vkGetImageSparseMemoryRequirements2KHR")); #endif #if (defined(VK_KHR_maintenance4)) - fp_vkGetDeviceBufferMemoryRequirementsKHR = reinterpret_cast(procAddr(device, "vkGetDeviceBufferMemoryRequirementsKHR")); + fp_vkGetDeviceBufferMemoryRequirementsKHR = reinterpret_cast( + procAddr(device, "vkGetDeviceBufferMemoryRequirementsKHR")); #endif #if (defined(VK_KHR_maintenance4)) - fp_vkGetDeviceImageMemoryRequirementsKHR = reinterpret_cast(procAddr(device, "vkGetDeviceImageMemoryRequirementsKHR")); + fp_vkGetDeviceImageMemoryRequirementsKHR = reinterpret_cast( + procAddr(device, "vkGetDeviceImageMemoryRequirementsKHR")); #endif #if (defined(VK_KHR_maintenance4)) - fp_vkGetDeviceImageSparseMemoryRequirementsKHR = reinterpret_cast(procAddr(device, 
"vkGetDeviceImageSparseMemoryRequirementsKHR")); + fp_vkGetDeviceImageSparseMemoryRequirementsKHR = + reinterpret_cast( + procAddr(device, "vkGetDeviceImageSparseMemoryRequirementsKHR")); #endif #if (defined(VK_KHR_sampler_ycbcr_conversion)) - fp_vkCreateSamplerYcbcrConversionKHR = reinterpret_cast(procAddr(device, "vkCreateSamplerYcbcrConversionKHR")); + fp_vkCreateSamplerYcbcrConversionKHR = reinterpret_cast( + procAddr(device, "vkCreateSamplerYcbcrConversionKHR")); #endif #if (defined(VK_KHR_sampler_ycbcr_conversion)) - fp_vkDestroySamplerYcbcrConversionKHR = reinterpret_cast(procAddr(device, "vkDestroySamplerYcbcrConversionKHR")); + fp_vkDestroySamplerYcbcrConversionKHR = reinterpret_cast( + procAddr(device, "vkDestroySamplerYcbcrConversionKHR")); #endif #if (defined(VK_KHR_maintenance3)) - fp_vkGetDescriptorSetLayoutSupportKHR = reinterpret_cast(procAddr(device, "vkGetDescriptorSetLayoutSupportKHR")); + fp_vkGetDescriptorSetLayoutSupportKHR = reinterpret_cast( + procAddr(device, "vkGetDescriptorSetLayoutSupportKHR")); #endif #if (defined(VK_EXT_calibrated_timestamps)) - fp_vkGetCalibratedTimestampsEXT = reinterpret_cast(procAddr(device, "vkGetCalibratedTimestampsEXT")); + fp_vkGetCalibratedTimestampsEXT = + reinterpret_cast(procAddr(device, "vkGetCalibratedTimestampsEXT")); #endif #if (defined(VK_KHR_create_renderpass2)) - fp_vkCreateRenderPass2KHR = reinterpret_cast(procAddr(device, "vkCreateRenderPass2KHR")); + fp_vkCreateRenderPass2KHR = + reinterpret_cast(procAddr(device, "vkCreateRenderPass2KHR")); #endif #if (defined(VK_KHR_create_renderpass2)) - fp_vkCmdBeginRenderPass2KHR = reinterpret_cast(procAddr(device, "vkCmdBeginRenderPass2KHR")); + fp_vkCmdBeginRenderPass2KHR = + reinterpret_cast(procAddr(device, "vkCmdBeginRenderPass2KHR")); #endif #if (defined(VK_KHR_create_renderpass2)) fp_vkCmdNextSubpass2KHR = reinterpret_cast(procAddr(device, "vkCmdNextSubpass2KHR")); #endif #if (defined(VK_KHR_create_renderpass2)) - fp_vkCmdEndRenderPass2KHR = reinterpret_cast(procAddr(device, "vkCmdEndRenderPass2KHR")); + fp_vkCmdEndRenderPass2KHR = + reinterpret_cast(procAddr(device, "vkCmdEndRenderPass2KHR")); #endif #if (defined(VK_KHR_timeline_semaphore)) - fp_vkGetSemaphoreCounterValueKHR = reinterpret_cast(procAddr(device, "vkGetSemaphoreCounterValueKHR")); + fp_vkGetSemaphoreCounterValueKHR = + reinterpret_cast(procAddr(device, "vkGetSemaphoreCounterValueKHR")); #endif #if (defined(VK_KHR_timeline_semaphore)) fp_vkWaitSemaphoresKHR = reinterpret_cast(procAddr(device, "vkWaitSemaphoresKHR")); @@ -2760,25 +3713,32 @@ struct DispatchTable { fp_vkSignalSemaphoreKHR = reinterpret_cast(procAddr(device, "vkSignalSemaphoreKHR")); #endif #if (defined(VK_AMD_draw_indirect_count)) - fp_vkCmdDrawIndirectCountAMD = reinterpret_cast(procAddr(device, "vkCmdDrawIndirectCountAMD")); + fp_vkCmdDrawIndirectCountAMD = + reinterpret_cast(procAddr(device, "vkCmdDrawIndirectCountAMD")); #endif #if (defined(VK_AMD_draw_indirect_count)) - fp_vkCmdDrawIndexedIndirectCountAMD = reinterpret_cast(procAddr(device, "vkCmdDrawIndexedIndirectCountAMD")); + fp_vkCmdDrawIndexedIndirectCountAMD = reinterpret_cast( + procAddr(device, "vkCmdDrawIndexedIndirectCountAMD")); #endif #if (defined(VK_NV_ray_tracing)) - fp_vkGetRayTracingShaderGroupHandlesNV = reinterpret_cast(procAddr(device, "vkGetRayTracingShaderGroupHandlesNV")); + fp_vkGetRayTracingShaderGroupHandlesNV = reinterpret_cast( + procAddr(device, "vkGetRayTracingShaderGroupHandlesNV")); #endif #if (defined(VK_KHR_buffer_device_address)) - 
fp_vkGetBufferOpaqueCaptureAddressKHR = reinterpret_cast(procAddr(device, "vkGetBufferOpaqueCaptureAddressKHR")); + fp_vkGetBufferOpaqueCaptureAddressKHR = reinterpret_cast( + procAddr(device, "vkGetBufferOpaqueCaptureAddressKHR")); #endif #if (defined(VK_EXT_buffer_device_address)) - fp_vkGetBufferDeviceAddressEXT = reinterpret_cast(procAddr(device, "vkGetBufferDeviceAddressEXT")); + fp_vkGetBufferDeviceAddressEXT = + reinterpret_cast(procAddr(device, "vkGetBufferDeviceAddressEXT")); #endif #if (defined(VK_KHR_buffer_device_address)) - fp_vkGetDeviceMemoryOpaqueCaptureAddressKHR = reinterpret_cast(procAddr(device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR")); + fp_vkGetDeviceMemoryOpaqueCaptureAddressKHR = reinterpret_cast( + procAddr(device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR")); #endif #if (defined(VK_EXT_line_rasterization)) - fp_vkCmdSetLineStippleEXT = reinterpret_cast(procAddr(device, "vkCmdSetLineStippleEXT")); + fp_vkCmdSetLineStippleEXT = + reinterpret_cast(procAddr(device, "vkCmdSetLineStippleEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) fp_vkCmdSetCullModeEXT = reinterpret_cast(procAddr(device, "vkCmdSetCullModeEXT")); @@ -2787,49 +3747,63 @@ struct DispatchTable { fp_vkCmdSetFrontFaceEXT = reinterpret_cast(procAddr(device, "vkCmdSetFrontFaceEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetPrimitiveTopologyEXT = reinterpret_cast(procAddr(device, "vkCmdSetPrimitiveTopologyEXT")); + fp_vkCmdSetPrimitiveTopologyEXT = + reinterpret_cast(procAddr(device, "vkCmdSetPrimitiveTopologyEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetViewportWithCountEXT = reinterpret_cast(procAddr(device, "vkCmdSetViewportWithCountEXT")); + fp_vkCmdSetViewportWithCountEXT = + reinterpret_cast(procAddr(device, "vkCmdSetViewportWithCountEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetScissorWithCountEXT = reinterpret_cast(procAddr(device, "vkCmdSetScissorWithCountEXT")); + fp_vkCmdSetScissorWithCountEXT = + reinterpret_cast(procAddr(device, "vkCmdSetScissorWithCountEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - fp_vkCmdBindVertexBuffers2EXT = reinterpret_cast(procAddr(device, "vkCmdBindVertexBuffers2EXT")); + fp_vkCmdBindVertexBuffers2EXT = + reinterpret_cast(procAddr(device, "vkCmdBindVertexBuffers2EXT")); #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetDepthTestEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetDepthTestEnableEXT")); + fp_vkCmdSetDepthTestEnableEXT = + reinterpret_cast(procAddr(device, "vkCmdSetDepthTestEnableEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetDepthWriteEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetDepthWriteEnableEXT")); + fp_vkCmdSetDepthWriteEnableEXT = + reinterpret_cast(procAddr(device, "vkCmdSetDepthWriteEnableEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetDepthCompareOpEXT = reinterpret_cast(procAddr(device, "vkCmdSetDepthCompareOpEXT")); + fp_vkCmdSetDepthCompareOpEXT = + reinterpret_cast(procAddr(device, "vkCmdSetDepthCompareOpEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetDepthBoundsTestEnableEXT = reinterpret_cast(procAddr(device, 
"vkCmdSetDepthBoundsTestEnableEXT")); + fp_vkCmdSetDepthBoundsTestEnableEXT = reinterpret_cast( + procAddr(device, "vkCmdSetDepthBoundsTestEnableEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetStencilTestEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetStencilTestEnableEXT")); + fp_vkCmdSetStencilTestEnableEXT = + reinterpret_cast(procAddr(device, "vkCmdSetStencilTestEnableEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) fp_vkCmdSetStencilOpEXT = reinterpret_cast(procAddr(device, "vkCmdSetStencilOpEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetRasterizerDiscardEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetRasterizerDiscardEnableEXT")); + fp_vkCmdSetRasterizerDiscardEnableEXT = reinterpret_cast( + procAddr(device, "vkCmdSetRasterizerDiscardEnableEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetDepthBiasEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetDepthBiasEnableEXT")); + fp_vkCmdSetDepthBiasEnableEXT = + reinterpret_cast(procAddr(device, "vkCmdSetDepthBiasEnableEXT")); #endif #if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) - fp_vkCmdSetPrimitiveRestartEnableEXT = reinterpret_cast(procAddr(device, "vkCmdSetPrimitiveRestartEnableEXT")); + fp_vkCmdSetPrimitiveRestartEnableEXT = reinterpret_cast( + procAddr(device, "vkCmdSetPrimitiveRestartEnableEXT")); #endif #if (defined(VK_EXT_private_data)) - fp_vkCreatePrivateDataSlotEXT = reinterpret_cast(procAddr(device, "vkCreatePrivateDataSlotEXT")); + fp_vkCreatePrivateDataSlotEXT = + reinterpret_cast(procAddr(device, "vkCreatePrivateDataSlotEXT")); #endif #if (defined(VK_EXT_private_data)) - fp_vkDestroyPrivateDataSlotEXT = reinterpret_cast(procAddr(device, "vkDestroyPrivateDataSlotEXT")); + fp_vkDestroyPrivateDataSlotEXT = + reinterpret_cast(procAddr(device, "vkDestroyPrivateDataSlotEXT")); #endif #if (defined(VK_EXT_private_data)) fp_vkSetPrivateDataEXT = reinterpret_cast(procAddr(device, "vkSetPrivateDataEXT")); @@ -2847,13 +3821,16 @@ struct DispatchTable { fp_vkCmdBlitImage2KHR = reinterpret_cast(procAddr(device, "vkCmdBlitImage2KHR")); #endif #if (defined(VK_KHR_copy_commands2)) - fp_vkCmdCopyBufferToImage2KHR = reinterpret_cast(procAddr(device, "vkCmdCopyBufferToImage2KHR")); + fp_vkCmdCopyBufferToImage2KHR = + reinterpret_cast(procAddr(device, "vkCmdCopyBufferToImage2KHR")); #endif #if (defined(VK_KHR_copy_commands2)) - fp_vkCmdCopyImageToBuffer2KHR = reinterpret_cast(procAddr(device, "vkCmdCopyImageToBuffer2KHR")); + fp_vkCmdCopyImageToBuffer2KHR = + reinterpret_cast(procAddr(device, "vkCmdCopyImageToBuffer2KHR")); #endif #if (defined(VK_KHR_copy_commands2)) - fp_vkCmdResolveImage2KHR = reinterpret_cast(procAddr(device, "vkCmdResolveImage2KHR")); + fp_vkCmdResolveImage2KHR = + reinterpret_cast(procAddr(device, "vkCmdResolveImage2KHR")); #endif #if (defined(VK_KHR_synchronization2)) fp_vkCmdSetEvent2KHR = reinterpret_cast(procAddr(device, "vkCmdSetEvent2KHR")); @@ -2865,5225 +3842,6833 @@ struct DispatchTable { fp_vkCmdWaitEvents2KHR = reinterpret_cast(procAddr(device, "vkCmdWaitEvents2KHR")); #endif #if (defined(VK_KHR_synchronization2)) - fp_vkCmdPipelineBarrier2KHR = reinterpret_cast(procAddr(device, "vkCmdPipelineBarrier2KHR")); + fp_vkCmdPipelineBarrier2KHR = + reinterpret_cast(procAddr(device, "vkCmdPipelineBarrier2KHR")); #endif #if 
(defined(VK_KHR_synchronization2)) fp_vkQueueSubmit2KHR = reinterpret_cast(procAddr(device, "vkQueueSubmit2KHR")); #endif #if (defined(VK_KHR_synchronization2)) - fp_vkCmdWriteTimestamp2KHR = reinterpret_cast(procAddr(device, "vkCmdWriteTimestamp2KHR")); + fp_vkCmdWriteTimestamp2KHR = + reinterpret_cast(procAddr(device, "vkCmdWriteTimestamp2KHR")); #endif #if (defined(VK_KHR_dynamic_rendering)) - fp_vkCmdBeginRenderingKHR = reinterpret_cast(procAddr(device, "vkCmdBeginRenderingKHR")); + fp_vkCmdBeginRenderingKHR = + reinterpret_cast(procAddr(device, "vkCmdBeginRenderingKHR")); #endif #if (defined(VK_KHR_dynamic_rendering)) fp_vkCmdEndRenderingKHR = reinterpret_cast(procAddr(device, "vkCmdEndRenderingKHR")); #endif #if (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) - fp_vkGetImageSubresourceLayout2EXT = reinterpret_cast(procAddr(device, "vkGetImageSubresourceLayout2EXT")); + fp_vkGetImageSubresourceLayout2EXT = + reinterpret_cast(procAddr(device, "vkGetImageSubresourceLayout2EXT")); #endif } - void getDeviceQueue(uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue) const noexcept { + void getDeviceQueue(uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue) const noexcept + { fp_vkGetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue); } - VkResult queueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence) const noexcept { + VkResult + queueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence) const noexcept + { return fp_vkQueueSubmit(queue, submitCount, pSubmits, fence); } - VkResult queueWaitIdle(VkQueue queue) const noexcept { - return fp_vkQueueWaitIdle(queue); - } - VkResult deviceWaitIdle() const noexcept { - return fp_vkDeviceWaitIdle(device); - } - VkResult allocateMemory(const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory) const noexcept { + VkResult queueWaitIdle(VkQueue queue) const noexcept { return fp_vkQueueWaitIdle(queue); } + VkResult deviceWaitIdle() const noexcept { return fp_vkDeviceWaitIdle(device); } + VkResult allocateMemory(const VkMemoryAllocateInfo* pAllocateInfo, + const VkAllocationCallbacks* pAllocator, + VkDeviceMemory* pMemory) const noexcept + { return fp_vkAllocateMemory(device, pAllocateInfo, pAllocator, pMemory); } - void freeMemory(VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) const noexcept { + void freeMemory(VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkFreeMemory(device, memory, pAllocator); } - VkResult mapMemory(VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData) const noexcept { + VkResult mapMemory(VkDeviceMemory memory, + VkDeviceSize offset, + VkDeviceSize size, + VkMemoryMapFlags flags, + void** ppData) const noexcept + { return fp_vkMapMemory(device, memory, offset, size, flags, ppData); } - void unmapMemory(VkDeviceMemory memory) const noexcept { - fp_vkUnmapMemory(device, memory); - } - VkResult flushMappedMemoryRanges(uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges) const noexcept { + void unmapMemory(VkDeviceMemory memory) const noexcept { fp_vkUnmapMemory(device, memory); } + VkResult flushMappedMemoryRanges(uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges) const noexcept + { return fp_vkFlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges); } - VkResult invalidateMappedMemoryRanges(uint32_t 
memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges) const noexcept { + VkResult invalidateMappedMemoryRanges(uint32_t memoryRangeCount, + const VkMappedMemoryRange* pMemoryRanges) const noexcept + { return fp_vkInvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges); } - void getDeviceMemoryCommitment(VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) const noexcept { + void getDeviceMemoryCommitment(VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) const noexcept + { fp_vkGetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes); } - void getBufferMemoryRequirements(VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements) const noexcept { + void getBufferMemoryRequirements(VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements) const noexcept + { fp_vkGetBufferMemoryRequirements(device, buffer, pMemoryRequirements); } - VkResult bindBufferMemory(VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset) const noexcept { + VkResult bindBufferMemory(VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset) const noexcept + { return fp_vkBindBufferMemory(device, buffer, memory, memoryOffset); } - void getImageMemoryRequirements(VkImage image, VkMemoryRequirements* pMemoryRequirements) const noexcept { + void getImageMemoryRequirements(VkImage image, VkMemoryRequirements* pMemoryRequirements) const noexcept + { fp_vkGetImageMemoryRequirements(device, image, pMemoryRequirements); } - VkResult bindImageMemory(VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) const noexcept { + VkResult bindImageMemory(VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) const noexcept + { return fp_vkBindImageMemory(device, image, memory, memoryOffset); } - void getImageSparseMemoryRequirements(VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) const noexcept { + void getImageSparseMemoryRequirements(VkImage image, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements* pSparseMemoryRequirements) const noexcept + { fp_vkGetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements); } - VkResult queueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) const noexcept { + VkResult queueBindSparse(VkQueue queue, + uint32_t bindInfoCount, + const VkBindSparseInfo* pBindInfo, + VkFence fence) const noexcept + { return fp_vkQueueBindSparse(queue, bindInfoCount, pBindInfo, fence); } - VkResult createFence(const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) const noexcept { + VkResult createFence(const VkFenceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence) const noexcept + { return fp_vkCreateFence(device, pCreateInfo, pAllocator, pFence); } - void destroyFence(VkFence fence, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyFence(VkFence fence, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyFence(device, fence, pAllocator); } - VkResult resetFences(uint32_t fenceCount, const VkFence* pFences) const noexcept { + VkResult resetFences(uint32_t fenceCount, const VkFence* pFences) const noexcept + { return fp_vkResetFences(device, fenceCount, pFences); } - VkResult getFenceStatus(VkFence fence) const noexcept { - return fp_vkGetFenceStatus(device, fence); - } - VkResult waitForFences(uint32_t fenceCount, const 
VkFence* pFences, VkBool32 waitAll, uint64_t timeout) const noexcept { + VkResult getFenceStatus(VkFence fence) const noexcept { return fp_vkGetFenceStatus(device, fence); } + VkResult + waitForFences(uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) const noexcept + { return fp_vkWaitForFences(device, fenceCount, pFences, waitAll, timeout); } - VkResult createSemaphore(const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore) const noexcept { + VkResult createSemaphore(const VkSemaphoreCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSemaphore* pSemaphore) const noexcept + { return fp_vkCreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore); } - void destroySemaphore(VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroySemaphore(VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroySemaphore(device, semaphore, pAllocator); } - VkResult createEvent(const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent) const noexcept { + VkResult createEvent(const VkEventCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkEvent* pEvent) const noexcept + { return fp_vkCreateEvent(device, pCreateInfo, pAllocator, pEvent); } - void destroyEvent(VkEvent event, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyEvent(VkEvent event, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyEvent(device, event, pAllocator); } - VkResult getEventStatus(VkEvent event) const noexcept { - return fp_vkGetEventStatus(device, event); - } - VkResult setEvent(VkEvent event) const noexcept { - return fp_vkSetEvent(device, event); - } - VkResult resetEvent(VkEvent event) const noexcept { - return fp_vkResetEvent(device, event); - } - VkResult createQueryPool(const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool) const noexcept { + VkResult getEventStatus(VkEvent event) const noexcept { return fp_vkGetEventStatus(device, event); } + VkResult setEvent(VkEvent event) const noexcept { return fp_vkSetEvent(device, event); } + VkResult resetEvent(VkEvent event) const noexcept { return fp_vkResetEvent(device, event); } + VkResult createQueryPool(const VkQueryPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkQueryPool* pQueryPool) const noexcept + { return fp_vkCreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool); } - void destroyQueryPool(VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyQueryPool(VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyQueryPool(device, queryPool, pAllocator); } - VkResult getQueryPoolResults(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) const noexcept { + VkResult getQueryPoolResults(VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void* pData, + VkDeviceSize stride, + VkQueryResultFlags flags) const noexcept + { return fp_vkGetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags); } #if (defined(VK_VERSION_1_2)) - void resetQueryPool(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) const noexcept { + void resetQueryPool(VkQueryPool queryPool, uint32_t 
firstQuery, uint32_t queryCount) const noexcept + { fp_vkResetQueryPool(device, queryPool, firstQuery, queryCount); } #endif - VkResult createBuffer(const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer) const noexcept { + VkResult createBuffer(const VkBufferCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBuffer* pBuffer) const noexcept + { return fp_vkCreateBuffer(device, pCreateInfo, pAllocator, pBuffer); } - void destroyBuffer(VkBuffer buffer, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyBuffer(VkBuffer buffer, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyBuffer(device, buffer, pAllocator); } - VkResult createBufferView(const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView) const noexcept { + VkResult createBufferView(const VkBufferViewCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBufferView* pView) const noexcept + { return fp_vkCreateBufferView(device, pCreateInfo, pAllocator, pView); } - void destroyBufferView(VkBufferView bufferView, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyBufferView(VkBufferView bufferView, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyBufferView(device, bufferView, pAllocator); } - VkResult createImage(const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage) const noexcept { + VkResult createImage(const VkImageCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkImage* pImage) const noexcept + { return fp_vkCreateImage(device, pCreateInfo, pAllocator, pImage); } - void destroyImage(VkImage image, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyImage(VkImage image, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyImage(device, image, pAllocator); } - void getImageSubresourceLayout(VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) const noexcept { + void getImageSubresourceLayout(VkImage image, + const VkImageSubresource* pSubresource, + VkSubresourceLayout* pLayout) const noexcept + { fp_vkGetImageSubresourceLayout(device, image, pSubresource, pLayout); } - VkResult createImageView(const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView) const noexcept { + VkResult createImageView(const VkImageViewCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkImageView* pView) const noexcept + { return fp_vkCreateImageView(device, pCreateInfo, pAllocator, pView); } - void destroyImageView(VkImageView imageView, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyImageView(VkImageView imageView, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyImageView(device, imageView, pAllocator); } - VkResult createShaderModule(const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule) const noexcept { + VkResult createShaderModule(const VkShaderModuleCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkShaderModule* pShaderModule) const noexcept + { return fp_vkCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule); } - void destroyShaderModule(VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyShaderModule(VkShaderModule shaderModule, const 
VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyShaderModule(device, shaderModule, pAllocator); } - VkResult createPipelineCache(const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache) const noexcept { + VkResult createPipelineCache(const VkPipelineCacheCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPipelineCache* pPipelineCache) const noexcept + { return fp_vkCreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache); } - void destroyPipelineCache(VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyPipelineCache(VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyPipelineCache(device, pipelineCache, pAllocator); } - VkResult getPipelineCacheData(VkPipelineCache pipelineCache, size_t* pDataSize, void* pData) const noexcept { + VkResult getPipelineCacheData(VkPipelineCache pipelineCache, size_t* pDataSize, void* pData) const noexcept + { return fp_vkGetPipelineCacheData(device, pipelineCache, pDataSize, pData); } - VkResult mergePipelineCaches(VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches) const noexcept { + VkResult mergePipelineCaches(VkPipelineCache dstCache, + uint32_t srcCacheCount, + const VkPipelineCache* pSrcCaches) const noexcept + { return fp_vkMergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches); } #if (defined(VK_KHR_pipeline_binary)) - VkResult createPipelineBinariesKHR(const VkPipelineBinaryCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineBinaryHandlesInfoKHR* pBinaries) const noexcept { + VkResult createPipelineBinariesKHR(const VkPipelineBinaryCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPipelineBinaryHandlesInfoKHR* pBinaries) const noexcept + { return fp_vkCreatePipelineBinariesKHR(device, pCreateInfo, pAllocator, pBinaries); } #endif #if (defined(VK_KHR_pipeline_binary)) - void destroyPipelineBinaryKHR(VkPipelineBinaryKHR pipelineBinary, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyPipelineBinaryKHR(VkPipelineBinaryKHR pipelineBinary, + const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyPipelineBinaryKHR(device, pipelineBinary, pAllocator); } #endif #if (defined(VK_KHR_pipeline_binary)) - VkResult getPipelineKeyKHR(const VkPipelineCreateInfoKHR* pPipelineCreateInfo, VkPipelineBinaryKeyKHR* pPipelineKey) const noexcept { + VkResult getPipelineKeyKHR(const VkPipelineCreateInfoKHR* pPipelineCreateInfo, + VkPipelineBinaryKeyKHR* pPipelineKey) const noexcept + { return fp_vkGetPipelineKeyKHR(device, pPipelineCreateInfo, pPipelineKey); } #endif #if (defined(VK_KHR_pipeline_binary)) - VkResult getPipelineBinaryDataKHR(const VkPipelineBinaryDataInfoKHR* pInfo, VkPipelineBinaryKeyKHR* pPipelineBinaryKey, size_t* pPipelineBinaryDataSize, void* pPipelineBinaryData) const noexcept { - return fp_vkGetPipelineBinaryDataKHR(device, pInfo, pPipelineBinaryKey, pPipelineBinaryDataSize, pPipelineBinaryData); + VkResult getPipelineBinaryDataKHR(const VkPipelineBinaryDataInfoKHR* pInfo, + VkPipelineBinaryKeyKHR* pPipelineBinaryKey, + size_t* pPipelineBinaryDataSize, + void* pPipelineBinaryData) const noexcept + { + return fp_vkGetPipelineBinaryDataKHR( + device, pInfo, pPipelineBinaryKey, pPipelineBinaryDataSize, pPipelineBinaryData); } #endif #if (defined(VK_KHR_pipeline_binary)) - VkResult 
releaseCapturedPipelineDataKHR(const VkReleaseCapturedPipelineDataInfoKHR* pInfo, const VkAllocationCallbacks* pAllocator) const noexcept { + VkResult releaseCapturedPipelineDataKHR(const VkReleaseCapturedPipelineDataInfoKHR* pInfo, + const VkAllocationCallbacks* pAllocator) const noexcept + { return fp_vkReleaseCapturedPipelineDataKHR(device, pInfo, pAllocator); } #endif - VkResult createGraphicsPipelines(VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) const noexcept { - return fp_vkCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); - } - VkResult createComputePipelines(VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) const noexcept { - return fp_vkCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); + VkResult createGraphicsPipelines(VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkGraphicsPipelineCreateInfo* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines) const noexcept + { + return fp_vkCreateGraphicsPipelines( + device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); + } + VkResult createComputePipelines(VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkComputePipelineCreateInfo* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines) const noexcept + { + return fp_vkCreateComputePipelines( + device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); } #if (defined(VK_HUAWEI_subpass_shading)) - VkResult getDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(VkRenderPass renderpass, VkExtent2D* pMaxWorkgroupSize) const noexcept { + VkResult getDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(VkRenderPass renderpass, + VkExtent2D* pMaxWorkgroupSize) const noexcept + { return fp_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(device, renderpass, pMaxWorkgroupSize); } #endif - void destroyPipeline(VkPipeline pipeline, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyPipeline(VkPipeline pipeline, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyPipeline(device, pipeline, pAllocator); } - VkResult createPipelineLayout(const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout) const noexcept { + VkResult createPipelineLayout(const VkPipelineLayoutCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPipelineLayout* pPipelineLayout) const noexcept + { return fp_vkCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout); } - void destroyPipelineLayout(VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyPipelineLayout(VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyPipelineLayout(device, pipelineLayout, pAllocator); } - VkResult createSampler(const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler) const noexcept { + VkResult createSampler(const VkSamplerCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSampler* pSampler) const noexcept + { return fp_vkCreateSampler(device, pCreateInfo, pAllocator, pSampler); } - 
void destroySampler(VkSampler sampler, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroySampler(VkSampler sampler, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroySampler(device, sampler, pAllocator); } - VkResult createDescriptorSetLayout(const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout) const noexcept { + VkResult createDescriptorSetLayout(const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorSetLayout* pSetLayout) const noexcept + { return fp_vkCreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout); } - void destroyDescriptorSetLayout(VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyDescriptorSetLayout(VkDescriptorSetLayout descriptorSetLayout, + const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator); } - VkResult createDescriptorPool(const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool) const noexcept { + VkResult createDescriptorPool(const VkDescriptorPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorPool* pDescriptorPool) const noexcept + { return fp_vkCreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool); } - void destroyDescriptorPool(VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyDescriptorPool(VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyDescriptorPool(device, descriptorPool, pAllocator); } - VkResult resetDescriptorPool(VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) const noexcept { + VkResult resetDescriptorPool(VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) const noexcept + { return fp_vkResetDescriptorPool(device, descriptorPool, flags); } - VkResult allocateDescriptorSets(const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets) const noexcept { + VkResult allocateDescriptorSets(const VkDescriptorSetAllocateInfo* pAllocateInfo, + VkDescriptorSet* pDescriptorSets) const noexcept + { return fp_vkAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets); } - VkResult freeDescriptorSets(VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets) const noexcept { + VkResult freeDescriptorSets(VkDescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VkDescriptorSet* pDescriptorSets) const noexcept + { return fp_vkFreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets); } - void updateDescriptorSets(uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies) const noexcept { - fp_vkUpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies); - } - VkResult createFramebuffer(const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer) const noexcept { + void updateDescriptorSets(uint32_t descriptorWriteCount, + const VkWriteDescriptorSet* pDescriptorWrites, + uint32_t descriptorCopyCount, + const VkCopyDescriptorSet* pDescriptorCopies) const noexcept + { + 
fp_vkUpdateDescriptorSets( + device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies); + } + VkResult createFramebuffer(const VkFramebufferCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkFramebuffer* pFramebuffer) const noexcept + { return fp_vkCreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer); } - void destroyFramebuffer(VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyFramebuffer(VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyFramebuffer(device, framebuffer, pAllocator); } - VkResult createRenderPass(const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) const noexcept { + VkResult createRenderPass(const VkRenderPassCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkRenderPass* pRenderPass) const noexcept + { return fp_vkCreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass); } - void destroyRenderPass(VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyRenderPass(VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyRenderPass(device, renderPass, pAllocator); } - void getRenderAreaGranularity(VkRenderPass renderPass, VkExtent2D* pGranularity) const noexcept { + void getRenderAreaGranularity(VkRenderPass renderPass, VkExtent2D* pGranularity) const noexcept + { fp_vkGetRenderAreaGranularity(device, renderPass, pGranularity); } #if (defined(VK_KHR_maintenance5)) - void getRenderingAreaGranularityKHR(const VkRenderingAreaInfoKHR* pRenderingAreaInfo, VkExtent2D* pGranularity) const noexcept { + void getRenderingAreaGranularityKHR(const VkRenderingAreaInfoKHR* pRenderingAreaInfo, + VkExtent2D* pGranularity) const noexcept + { fp_vkGetRenderingAreaGranularityKHR(device, pRenderingAreaInfo, pGranularity); } #endif - VkResult createCommandPool(const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool) const noexcept { + VkResult createCommandPool(const VkCommandPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCommandPool* pCommandPool) const noexcept + { return fp_vkCreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool); } - void destroyCommandPool(VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyCommandPool(VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyCommandPool(device, commandPool, pAllocator); } - VkResult resetCommandPool(VkCommandPool commandPool, VkCommandPoolResetFlags flags) const noexcept { + VkResult resetCommandPool(VkCommandPool commandPool, VkCommandPoolResetFlags flags) const noexcept + { return fp_vkResetCommandPool(device, commandPool, flags); } - VkResult allocateCommandBuffers(const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers) const noexcept { + VkResult allocateCommandBuffers(const VkCommandBufferAllocateInfo* pAllocateInfo, + VkCommandBuffer* pCommandBuffers) const noexcept + { return fp_vkAllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers); } - void freeCommandBuffers(VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers) const noexcept { + void freeCommandBuffers(VkCommandPool commandPool, + uint32_t commandBufferCount, + const VkCommandBuffer* 
pCommandBuffers) const noexcept + { fp_vkFreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers); } - VkResult beginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo) const noexcept { + VkResult beginCommandBuffer(VkCommandBuffer commandBuffer, + const VkCommandBufferBeginInfo* pBeginInfo) const noexcept + { return fp_vkBeginCommandBuffer(commandBuffer, pBeginInfo); } - VkResult endCommandBuffer(VkCommandBuffer commandBuffer) const noexcept { + VkResult endCommandBuffer(VkCommandBuffer commandBuffer) const noexcept + { return fp_vkEndCommandBuffer(commandBuffer); } - VkResult resetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) const noexcept { + VkResult resetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) const noexcept + { return fp_vkResetCommandBuffer(commandBuffer, flags); } - void cmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) const noexcept { + void cmdBindPipeline(VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline) const noexcept + { fp_vkCmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline); } #if (defined(VK_EXT_attachment_feedback_loop_dynamic_state)) - void cmdSetAttachmentFeedbackLoopEnableEXT(VkCommandBuffer commandBuffer, VkImageAspectFlags aspectMask) const noexcept { + void cmdSetAttachmentFeedbackLoopEnableEXT(VkCommandBuffer commandBuffer, + VkImageAspectFlags aspectMask) const noexcept + { fp_vkCmdSetAttachmentFeedbackLoopEnableEXT(commandBuffer, aspectMask); } #endif - void cmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports) const noexcept { + void cmdSetViewport(VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewport* pViewports) const noexcept + { fp_vkCmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports); } - void cmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors) const noexcept { + void cmdSetScissor(VkCommandBuffer commandBuffer, + uint32_t firstScissor, + uint32_t scissorCount, + const VkRect2D* pScissors) const noexcept + { fp_vkCmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors); } - void cmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) const noexcept { + void cmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) const noexcept + { fp_vkCmdSetLineWidth(commandBuffer, lineWidth); } - void cmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor) const noexcept { + void cmdSetDepthBias(VkCommandBuffer commandBuffer, + float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor) const noexcept + { fp_vkCmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor); } - void cmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) const noexcept { + void cmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) const noexcept + { fp_vkCmdSetBlendConstants(commandBuffer, blendConstants); } - void cmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) const noexcept { + void cmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) const noexcept + { 
fp_vkCmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds); } - void cmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask) const noexcept { + void cmdSetStencilCompareMask(VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t compareMask) const noexcept + { fp_vkCmdSetStencilCompareMask(commandBuffer, faceMask, compareMask); } - void cmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask) const noexcept { + void cmdSetStencilWriteMask(VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t writeMask) const noexcept + { fp_vkCmdSetStencilWriteMask(commandBuffer, faceMask, writeMask); } - void cmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference) const noexcept { + void cmdSetStencilReference(VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t reference) const noexcept + { fp_vkCmdSetStencilReference(commandBuffer, faceMask, reference); } - void cmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) const noexcept { - fp_vkCmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets); - } - void cmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) const noexcept { + void cmdBindDescriptorSets(VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VkDescriptorSet* pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t* pDynamicOffsets) const noexcept + { + fp_vkCmdBindDescriptorSets(commandBuffer, + pipelineBindPoint, + layout, + firstSet, + descriptorSetCount, + pDescriptorSets, + dynamicOffsetCount, + pDynamicOffsets); + } + void cmdBindIndexBuffer(VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkIndexType indexType) const noexcept + { fp_vkCmdBindIndexBuffer(commandBuffer, buffer, offset, indexType); } - void cmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) const noexcept { + void cmdBindVertexBuffers(VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets) const noexcept + { fp_vkCmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets); } - void cmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) const noexcept { + void cmdDraw(VkCommandBuffer commandBuffer, + uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance) const noexcept + { fp_vkCmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance); } - void cmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) const noexcept { + void cmdDrawIndexed(VkCommandBuffer commandBuffer, + uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t 
firstInstance) const noexcept + { fp_vkCmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance); } #if (defined(VK_EXT_multi_draw)) - void cmdDrawMultiEXT(VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawInfoEXT* pVertexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride) const noexcept { + void cmdDrawMultiEXT(VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawInfoEXT* pVertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride) const noexcept + { fp_vkCmdDrawMultiEXT(commandBuffer, drawCount, pVertexInfo, instanceCount, firstInstance, stride); } #endif #if (defined(VK_EXT_multi_draw)) - void cmdDrawMultiIndexedEXT(VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawIndexedInfoEXT* pIndexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, const int32_t* pVertexOffset) const noexcept { - fp_vkCmdDrawMultiIndexedEXT(commandBuffer, drawCount, pIndexInfo, instanceCount, firstInstance, stride, pVertexOffset); - } -#endif - void cmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) const noexcept { + void cmdDrawMultiIndexedEXT(VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawIndexedInfoEXT* pIndexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + const int32_t* pVertexOffset) const noexcept + { + fp_vkCmdDrawMultiIndexedEXT( + commandBuffer, drawCount, pIndexInfo, instanceCount, firstInstance, stride, pVertexOffset); + } +#endif + void cmdDrawIndirect(VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride) const noexcept + { fp_vkCmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride); } - void cmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) const noexcept { + void cmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride) const noexcept + { fp_vkCmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride); } - void cmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) const noexcept { + void cmdDispatch(VkCommandBuffer commandBuffer, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ) const noexcept + { fp_vkCmdDispatch(commandBuffer, groupCountX, groupCountY, groupCountZ); } - void cmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const noexcept { + void cmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const noexcept + { fp_vkCmdDispatchIndirect(commandBuffer, buffer, offset); } #if (defined(VK_HUAWEI_subpass_shading)) - void cmdSubpassShadingHUAWEI(VkCommandBuffer commandBuffer) const noexcept { + void cmdSubpassShadingHUAWEI(VkCommandBuffer commandBuffer) const noexcept + { fp_vkCmdSubpassShadingHUAWEI(commandBuffer); } #endif #if (defined(VK_HUAWEI_cluster_culling_shader)) - void cmdDrawClusterHUAWEI(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) const noexcept { + void cmdDrawClusterHUAWEI(VkCommandBuffer commandBuffer, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ) const noexcept + { fp_vkCmdDrawClusterHUAWEI(commandBuffer, groupCountX, groupCountY, 
groupCountZ); } #endif #if (defined(VK_HUAWEI_cluster_culling_shader)) - void cmdDrawClusterIndirectHUAWEI(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const noexcept { + void + cmdDrawClusterIndirectHUAWEI(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const noexcept + { fp_vkCmdDrawClusterIndirectHUAWEI(commandBuffer, buffer, offset); } #endif #if (defined(VK_NV_device_generated_commands_compute)) - void cmdUpdatePipelineIndirectBufferNV(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) const noexcept { + void cmdUpdatePipelineIndirectBufferNV(VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline) const noexcept + { fp_vkCmdUpdatePipelineIndirectBufferNV(commandBuffer, pipelineBindPoint, pipeline); } #endif - void cmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) const noexcept { + void cmdCopyBuffer(VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferCopy* pRegions) const noexcept + { fp_vkCmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions); } - void cmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) const noexcept { + void cmdCopyImage(VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageCopy* pRegions) const noexcept + { fp_vkCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions); } - void cmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) const noexcept { - fp_vkCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter); - } - void cmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) const noexcept { + void cmdBlitImage(VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageBlit* pRegions, + VkFilter filter) const noexcept + { + fp_vkCmdBlitImage( + commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter); + } + void cmdCopyBufferToImage(VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkBufferImageCopy* pRegions) const noexcept + { fp_vkCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions); } - void cmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) const noexcept { + void cmdCopyImageToBuffer(VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferImageCopy* pRegions) const noexcept + { fp_vkCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, 
regionCount, pRegions); } #if (defined(VK_NV_copy_memory_indirect)) - void cmdCopyMemoryIndirectNV(VkCommandBuffer commandBuffer, VkDeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride) const noexcept { + void cmdCopyMemoryIndirectNV(VkCommandBuffer commandBuffer, + VkDeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride) const noexcept + { fp_vkCmdCopyMemoryIndirectNV(commandBuffer, copyBufferAddress, copyCount, stride); } #endif #if (defined(VK_NV_copy_memory_indirect)) - void cmdCopyMemoryToImageIndirectNV(VkCommandBuffer commandBuffer, VkDeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride, VkImage dstImage, VkImageLayout dstImageLayout, const VkImageSubresourceLayers* pImageSubresources) const noexcept { - fp_vkCmdCopyMemoryToImageIndirectNV(commandBuffer, copyBufferAddress, copyCount, stride, dstImage, dstImageLayout, pImageSubresources); - } -#endif - void cmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData) const noexcept { + void cmdCopyMemoryToImageIndirectNV(VkCommandBuffer commandBuffer, + VkDeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + VkImage dstImage, + VkImageLayout dstImageLayout, + const VkImageSubresourceLayers* pImageSubresources) const noexcept + { + fp_vkCmdCopyMemoryToImageIndirectNV( + commandBuffer, copyBufferAddress, copyCount, stride, dstImage, dstImageLayout, pImageSubresources); + } +#endif + void cmdUpdateBuffer(VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize dataSize, + const void* pData) const noexcept + { fp_vkCmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData); } - void cmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data) const noexcept { + void cmdFillBuffer(VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize size, + uint32_t data) const noexcept + { fp_vkCmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data); } - void cmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) const noexcept { + void cmdClearColorImage(VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearColorValue* pColor, + uint32_t rangeCount, + const VkImageSubresourceRange* pRanges) const noexcept + { fp_vkCmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges); } - void cmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) const noexcept { + void cmdClearDepthStencilImage(VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearDepthStencilValue* pDepthStencil, + uint32_t rangeCount, + const VkImageSubresourceRange* pRanges) const noexcept + { fp_vkCmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges); } - void cmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) const noexcept { + void cmdClearAttachments(VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkClearAttachment* pAttachments, + uint32_t 
rectCount, + const VkClearRect* pRects) const noexcept + { fp_vkCmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects); } - void cmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) const noexcept { + void cmdResolveImage(VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageResolve* pRegions) const noexcept + { fp_vkCmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions); } - void cmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const noexcept { + void cmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const noexcept + { fp_vkCmdSetEvent(commandBuffer, event, stageMask); } - void cmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const noexcept { + void cmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const noexcept + { fp_vkCmdResetEvent(commandBuffer, event, stageMask); } - void cmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) const noexcept { - fp_vkCmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers); - } - void cmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) const noexcept { - fp_vkCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers); - } - void cmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags) const noexcept { + void cmdWaitEvents(VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent* pEvents, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VkMemoryBarrier* pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier* pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier* pImageMemoryBarriers) const noexcept + { + fp_vkCmdWaitEvents(commandBuffer, + eventCount, + pEvents, + srcStageMask, + dstStageMask, + memoryBarrierCount, + pMemoryBarriers, + bufferMemoryBarrierCount, + pBufferMemoryBarriers, + imageMemoryBarrierCount, + pImageMemoryBarriers); + } + void cmdPipelineBarrier(VkCommandBuffer commandBuffer, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + 
VkDependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VkMemoryBarrier* pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier* pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier* pImageMemoryBarriers) const noexcept + { + fp_vkCmdPipelineBarrier(commandBuffer, + srcStageMask, + dstStageMask, + dependencyFlags, + memoryBarrierCount, + pMemoryBarriers, + bufferMemoryBarrierCount, + pBufferMemoryBarriers, + imageMemoryBarrierCount, + pImageMemoryBarriers); + } + void cmdBeginQuery(VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags) const noexcept + { fp_vkCmdBeginQuery(commandBuffer, queryPool, query, flags); } - void cmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query) const noexcept { + void cmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query) const noexcept + { fp_vkCmdEndQuery(commandBuffer, queryPool, query); } #if (defined(VK_EXT_conditional_rendering)) - void cmdBeginConditionalRenderingEXT(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin) const noexcept { + void + cmdBeginConditionalRenderingEXT(VkCommandBuffer commandBuffer, + const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin) const noexcept + { fp_vkCmdBeginConditionalRenderingEXT(commandBuffer, pConditionalRenderingBegin); } #endif #if (defined(VK_EXT_conditional_rendering)) - void cmdEndConditionalRenderingEXT(VkCommandBuffer commandBuffer) const noexcept { + void cmdEndConditionalRenderingEXT(VkCommandBuffer commandBuffer) const noexcept + { fp_vkCmdEndConditionalRenderingEXT(commandBuffer); } #endif - void cmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) const noexcept { + void cmdResetQueryPool(VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount) const noexcept + { fp_vkCmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount); } - void cmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query) const noexcept { + void cmdWriteTimestamp(VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkQueryPool queryPool, + uint32_t query) const noexcept + { fp_vkCmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query); } - void cmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags) const noexcept { - fp_vkCmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags); - } - void cmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues) const noexcept { + void cmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize stride, + VkQueryResultFlags flags) const noexcept + { + fp_vkCmdCopyQueryPoolResults( + commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags); + } + void cmdPushConstants(VkCommandBuffer commandBuffer, + VkPipelineLayout layout, + VkShaderStageFlags stageFlags, + uint32_t offset, + uint32_t 
size, + const void* pValues) const noexcept + { fp_vkCmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues); } - void cmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) const noexcept { + void cmdBeginRenderPass(VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo* pRenderPassBegin, + VkSubpassContents contents) const noexcept + { fp_vkCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents); } - void cmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) const noexcept { + void cmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) const noexcept + { fp_vkCmdNextSubpass(commandBuffer, contents); } - void cmdEndRenderPass(VkCommandBuffer commandBuffer) const noexcept { - fp_vkCmdEndRenderPass(commandBuffer); - } - void cmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers) const noexcept { + void cmdEndRenderPass(VkCommandBuffer commandBuffer) const noexcept { fp_vkCmdEndRenderPass(commandBuffer); } + void cmdExecuteCommands(VkCommandBuffer commandBuffer, + uint32_t commandBufferCount, + const VkCommandBuffer* pCommandBuffers) const noexcept + { fp_vkCmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers); } #if (defined(VK_KHR_display_swapchain)) - VkResult createSharedSwapchainsKHR(uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains) const noexcept { + VkResult createSharedSwapchainsKHR(uint32_t swapchainCount, + const VkSwapchainCreateInfoKHR* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkSwapchainKHR* pSwapchains) const noexcept + { return fp_vkCreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator, pSwapchains); } #endif #if (defined(VK_KHR_swapchain)) - VkResult createSwapchainKHR(const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain) const noexcept { + VkResult createSwapchainKHR(const VkSwapchainCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSwapchainKHR* pSwapchain) const noexcept + { return fp_vkCreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain); } #endif #if (defined(VK_KHR_swapchain)) - void destroySwapchainKHR(VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroySwapchainKHR(VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroySwapchainKHR(device, swapchain, pAllocator); } #endif #if (defined(VK_KHR_swapchain)) - VkResult getSwapchainImagesKHR(VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages) const noexcept { + VkResult getSwapchainImagesKHR(VkSwapchainKHR swapchain, + uint32_t* pSwapchainImageCount, + VkImage* pSwapchainImages) const noexcept + { return fp_vkGetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages); } #endif #if (defined(VK_KHR_swapchain)) - VkResult acquireNextImageKHR(VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex) const noexcept { + VkResult acquireNextImageKHR(VkSwapchainKHR swapchain, + uint64_t timeout, + VkSemaphore semaphore, + VkFence fence, + uint32_t* pImageIndex) const noexcept + { return fp_vkAcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex); } #endif #if 
(defined(VK_KHR_swapchain)) - VkResult queuePresentKHR(VkQueue queue, const VkPresentInfoKHR* pPresentInfo) const noexcept { + VkResult queuePresentKHR(VkQueue queue, const VkPresentInfoKHR* pPresentInfo) const noexcept + { return fp_vkQueuePresentKHR(queue, pPresentInfo); } #endif #if (defined(VK_EXT_debug_marker)) - VkResult debugMarkerSetObjectNameEXT(const VkDebugMarkerObjectNameInfoEXT* pNameInfo) const noexcept { + VkResult debugMarkerSetObjectNameEXT(const VkDebugMarkerObjectNameInfoEXT* pNameInfo) const noexcept + { return fp_vkDebugMarkerSetObjectNameEXT(device, pNameInfo); } #endif #if (defined(VK_EXT_debug_marker)) - VkResult debugMarkerSetObjectTagEXT(const VkDebugMarkerObjectTagInfoEXT* pTagInfo) const noexcept { + VkResult debugMarkerSetObjectTagEXT(const VkDebugMarkerObjectTagInfoEXT* pTagInfo) const noexcept + { return fp_vkDebugMarkerSetObjectTagEXT(device, pTagInfo); } #endif #if (defined(VK_EXT_debug_marker)) - void cmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) const noexcept { + void cmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) const noexcept + { fp_vkCmdDebugMarkerBeginEXT(commandBuffer, pMarkerInfo); } #endif #if (defined(VK_EXT_debug_marker)) - void cmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer) const noexcept { + void cmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer) const noexcept + { fp_vkCmdDebugMarkerEndEXT(commandBuffer); } #endif #if (defined(VK_EXT_debug_marker)) - void cmdDebugMarkerInsertEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) const noexcept { + void cmdDebugMarkerInsertEXT(VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) const noexcept + { fp_vkCmdDebugMarkerInsertEXT(commandBuffer, pMarkerInfo); } #endif #if (defined(VK_NV_external_memory_win32)) - VkResult getMemoryWin32HandleNV(VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle) const noexcept { + VkResult getMemoryWin32HandleNV(VkDeviceMemory memory, + VkExternalMemoryHandleTypeFlagsNV handleType, + HANDLE* pHandle) const noexcept + { return fp_vkGetMemoryWin32HandleNV(device, memory, handleType, pHandle); } #endif #if (defined(VK_NV_device_generated_commands)) - void cmdExecuteGeneratedCommandsNV(VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo) const noexcept { + void cmdExecuteGeneratedCommandsNV(VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo) const noexcept + { fp_vkCmdExecuteGeneratedCommandsNV(commandBuffer, isPreprocessed, pGeneratedCommandsInfo); } #endif #if (defined(VK_NV_device_generated_commands)) - void cmdPreprocessGeneratedCommandsNV(VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo) const noexcept { + void cmdPreprocessGeneratedCommandsNV(VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo) const noexcept + { fp_vkCmdPreprocessGeneratedCommandsNV(commandBuffer, pGeneratedCommandsInfo); } #endif #if (defined(VK_NV_device_generated_commands)) - void cmdBindPipelineShaderGroupNV(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex) const noexcept { + void cmdBindPipelineShaderGroupNV(VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline, + uint32_t groupIndex) const 
noexcept + { fp_vkCmdBindPipelineShaderGroupNV(commandBuffer, pipelineBindPoint, pipeline, groupIndex); } #endif #if (defined(VK_NV_device_generated_commands)) - void getGeneratedCommandsMemoryRequirementsNV(const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + void getGeneratedCommandsMemoryRequirementsNV(const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, + VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept + { fp_vkGetGeneratedCommandsMemoryRequirementsNV(device, pInfo, pMemoryRequirements); } #endif #if (defined(VK_NV_device_generated_commands)) - VkResult createIndirectCommandsLayoutNV(const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNV* pIndirectCommandsLayout) const noexcept { + VkResult createIndirectCommandsLayoutNV(const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkIndirectCommandsLayoutNV* pIndirectCommandsLayout) const noexcept + { return fp_vkCreateIndirectCommandsLayoutNV(device, pCreateInfo, pAllocator, pIndirectCommandsLayout); } #endif #if (defined(VK_NV_device_generated_commands)) - void destroyIndirectCommandsLayoutNV(VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyIndirectCommandsLayoutNV(VkIndirectCommandsLayoutNV indirectCommandsLayout, + const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyIndirectCommandsLayoutNV(device, indirectCommandsLayout, pAllocator); } #endif #if (defined(VK_EXT_device_generated_commands)) - void cmdExecuteGeneratedCommandsEXT(VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo) const noexcept { + void cmdExecuteGeneratedCommandsEXT(VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo) const noexcept + { fp_vkCmdExecuteGeneratedCommandsEXT(commandBuffer, isPreprocessed, pGeneratedCommandsInfo); } #endif #if (defined(VK_EXT_device_generated_commands)) - void cmdPreprocessGeneratedCommandsEXT(VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo, VkCommandBuffer stateCommandBuffer) const noexcept { + void cmdPreprocessGeneratedCommandsEXT(VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo, + VkCommandBuffer stateCommandBuffer) const noexcept + { fp_vkCmdPreprocessGeneratedCommandsEXT(commandBuffer, pGeneratedCommandsInfo, stateCommandBuffer); } #endif #if (defined(VK_EXT_device_generated_commands)) - void getGeneratedCommandsMemoryRequirementsEXT(const VkGeneratedCommandsMemoryRequirementsInfoEXT* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + void getGeneratedCommandsMemoryRequirementsEXT(const VkGeneratedCommandsMemoryRequirementsInfoEXT* pInfo, + VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept + { fp_vkGetGeneratedCommandsMemoryRequirementsEXT(device, pInfo, pMemoryRequirements); } #endif #if (defined(VK_EXT_device_generated_commands)) - VkResult createIndirectCommandsLayoutEXT(const VkIndirectCommandsLayoutCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutEXT* pIndirectCommandsLayout) const noexcept { + VkResult createIndirectCommandsLayoutEXT(const VkIndirectCommandsLayoutCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + 
VkIndirectCommandsLayoutEXT* pIndirectCommandsLayout) const noexcept + { return fp_vkCreateIndirectCommandsLayoutEXT(device, pCreateInfo, pAllocator, pIndirectCommandsLayout); } #endif #if (defined(VK_EXT_device_generated_commands)) - void destroyIndirectCommandsLayoutEXT(VkIndirectCommandsLayoutEXT indirectCommandsLayout, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyIndirectCommandsLayoutEXT(VkIndirectCommandsLayoutEXT indirectCommandsLayout, + const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyIndirectCommandsLayoutEXT(device, indirectCommandsLayout, pAllocator); } #endif #if (defined(VK_EXT_device_generated_commands)) - VkResult createIndirectExecutionSetEXT(const VkIndirectExecutionSetCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectExecutionSetEXT* pIndirectExecutionSet) const noexcept { + VkResult createIndirectExecutionSetEXT(const VkIndirectExecutionSetCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkIndirectExecutionSetEXT* pIndirectExecutionSet) const noexcept + { return fp_vkCreateIndirectExecutionSetEXT(device, pCreateInfo, pAllocator, pIndirectExecutionSet); } #endif #if (defined(VK_EXT_device_generated_commands)) - void destroyIndirectExecutionSetEXT(VkIndirectExecutionSetEXT indirectExecutionSet, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyIndirectExecutionSetEXT(VkIndirectExecutionSetEXT indirectExecutionSet, + const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyIndirectExecutionSetEXT(device, indirectExecutionSet, pAllocator); } #endif #if (defined(VK_EXT_device_generated_commands)) - void updateIndirectExecutionSetPipelineEXT(VkIndirectExecutionSetEXT indirectExecutionSet, uint32_t executionSetWriteCount, const VkWriteIndirectExecutionSetPipelineEXT* pExecutionSetWrites) const noexcept { - fp_vkUpdateIndirectExecutionSetPipelineEXT(device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites); + void updateIndirectExecutionSetPipelineEXT( + VkIndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VkWriteIndirectExecutionSetPipelineEXT* pExecutionSetWrites) const noexcept + { + fp_vkUpdateIndirectExecutionSetPipelineEXT( + device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites); } #endif #if (defined(VK_EXT_device_generated_commands)) - void updateIndirectExecutionSetShaderEXT(VkIndirectExecutionSetEXT indirectExecutionSet, uint32_t executionSetWriteCount, const VkWriteIndirectExecutionSetShaderEXT* pExecutionSetWrites) const noexcept { - fp_vkUpdateIndirectExecutionSetShaderEXT(device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites); + void + updateIndirectExecutionSetShaderEXT(VkIndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VkWriteIndirectExecutionSetShaderEXT* pExecutionSetWrites) const noexcept + { + fp_vkUpdateIndirectExecutionSetShaderEXT( + device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites); } #endif #if (defined(VK_KHR_push_descriptor)) - void cmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites) const noexcept { - fp_vkCmdPushDescriptorSetKHR(commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites); + void cmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, + VkPipelineBindPoint 
pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet* pDescriptorWrites) const noexcept + { + fp_vkCmdPushDescriptorSetKHR( + commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites); } #endif #if (defined(VK_VERSION_1_1)) - void trimCommandPool(VkCommandPool commandPool, VkCommandPoolTrimFlagsKHR flags) const noexcept { + void trimCommandPool(VkCommandPool commandPool, VkCommandPoolTrimFlagsKHR flags) const noexcept + { fp_vkTrimCommandPool(device, commandPool, flags); } #endif #if (defined(VK_KHR_external_memory_win32)) - VkResult getMemoryWin32HandleKHR(const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) const noexcept { + VkResult getMemoryWin32HandleKHR(const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, + HANDLE* pHandle) const noexcept + { return fp_vkGetMemoryWin32HandleKHR(device, pGetWin32HandleInfo, pHandle); } #endif #if (defined(VK_KHR_external_memory_win32)) - VkResult getMemoryWin32HandlePropertiesKHR(VkExternalMemoryHandleTypeFlagBitsKHR handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties) const noexcept { + VkResult + getMemoryWin32HandlePropertiesKHR(VkExternalMemoryHandleTypeFlagBitsKHR handleType, + HANDLE handle, + VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties) const noexcept + { return fp_vkGetMemoryWin32HandlePropertiesKHR(device, handleType, handle, pMemoryWin32HandleProperties); } #endif #if (defined(VK_KHR_external_memory_fd)) - VkResult getMemoryFdKHR(const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd) const noexcept { + VkResult getMemoryFdKHR(const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd) const noexcept + { return fp_vkGetMemoryFdKHR(device, pGetFdInfo, pFd); } #endif #if (defined(VK_KHR_external_memory_fd)) - VkResult getMemoryFdPropertiesKHR(VkExternalMemoryHandleTypeFlagBitsKHR handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties) const noexcept { + VkResult getMemoryFdPropertiesKHR(VkExternalMemoryHandleTypeFlagBitsKHR handleType, + int fd, + VkMemoryFdPropertiesKHR* pMemoryFdProperties) const noexcept + { return fp_vkGetMemoryFdPropertiesKHR(device, handleType, fd, pMemoryFdProperties); } #endif #if (defined(VK_FUCHSIA_external_memory)) - VkResult getMemoryZirconHandleFUCHSIA(const VkMemoryGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, zx_handle_t* pZirconHandle) const noexcept { + VkResult getMemoryZirconHandleFUCHSIA(const VkMemoryGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, + zx_handle_t* pZirconHandle) const noexcept + { return fp_vkGetMemoryZirconHandleFUCHSIA(device, pGetZirconHandleInfo, pZirconHandle); } #endif #if (defined(VK_FUCHSIA_external_memory)) - VkResult getMemoryZirconHandlePropertiesFUCHSIA(VkExternalMemoryHandleTypeFlagBitsKHR handleType, zx_handle_t zirconHandle, VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties) const noexcept { - return fp_vkGetMemoryZirconHandlePropertiesFUCHSIA(device, handleType, zirconHandle, pMemoryZirconHandleProperties); + VkResult getMemoryZirconHandlePropertiesFUCHSIA( + VkExternalMemoryHandleTypeFlagBitsKHR handleType, + zx_handle_t zirconHandle, + VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties) const noexcept + { + return fp_vkGetMemoryZirconHandlePropertiesFUCHSIA( + device, handleType, zirconHandle, pMemoryZirconHandleProperties); } #endif #if (defined(VK_NV_external_memory_rdma)) - VkResult getMemoryRemoteAddressNV(const VkMemoryGetRemoteAddressInfoNV* 
pMemoryGetRemoteAddressInfo, VkRemoteAddressNV* pAddress) const noexcept { + VkResult getMemoryRemoteAddressNV(const VkMemoryGetRemoteAddressInfoNV* pMemoryGetRemoteAddressInfo, + VkRemoteAddressNV* pAddress) const noexcept + { return fp_vkGetMemoryRemoteAddressNV(device, pMemoryGetRemoteAddressInfo, pAddress); } #endif #if (defined(VK_NV_external_memory_sci_buf)) - VkResult getMemorySciBufNV(const VkMemoryGetSciBufInfoNV* pGetSciBufInfo, NvSciBufObj* pHandle) const noexcept { + VkResult getMemorySciBufNV(const VkMemoryGetSciBufInfoNV* pGetSciBufInfo, NvSciBufObj* pHandle) const noexcept + { return fp_vkGetMemorySciBufNV(device, pGetSciBufInfo, pHandle); } #endif #if (defined(VK_KHR_external_semaphore_win32)) - VkResult getSemaphoreWin32HandleKHR(const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) const noexcept { + VkResult getSemaphoreWin32HandleKHR(const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, + HANDLE* pHandle) const noexcept + { return fp_vkGetSemaphoreWin32HandleKHR(device, pGetWin32HandleInfo, pHandle); } #endif #if (defined(VK_KHR_external_semaphore_win32)) - VkResult importSemaphoreWin32HandleKHR(const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo) const noexcept { + VkResult importSemaphoreWin32HandleKHR( + const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo) const noexcept + { return fp_vkImportSemaphoreWin32HandleKHR(device, pImportSemaphoreWin32HandleInfo); } #endif #if (defined(VK_KHR_external_semaphore_fd)) - VkResult getSemaphoreFdKHR(const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd) const noexcept { + VkResult getSemaphoreFdKHR(const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd) const noexcept + { return fp_vkGetSemaphoreFdKHR(device, pGetFdInfo, pFd); } #endif #if (defined(VK_KHR_external_semaphore_fd)) - VkResult importSemaphoreFdKHR(const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) const noexcept { + VkResult importSemaphoreFdKHR(const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) const noexcept + { return fp_vkImportSemaphoreFdKHR(device, pImportSemaphoreFdInfo); } #endif #if (defined(VK_FUCHSIA_external_semaphore)) - VkResult getSemaphoreZirconHandleFUCHSIA(const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, zx_handle_t* pZirconHandle) const noexcept { + VkResult getSemaphoreZirconHandleFUCHSIA(const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, + zx_handle_t* pZirconHandle) const noexcept + { return fp_vkGetSemaphoreZirconHandleFUCHSIA(device, pGetZirconHandleInfo, pZirconHandle); } #endif #if (defined(VK_FUCHSIA_external_semaphore)) - VkResult importSemaphoreZirconHandleFUCHSIA(const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo) const noexcept { + VkResult importSemaphoreZirconHandleFUCHSIA( + const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo) const noexcept + { return fp_vkImportSemaphoreZirconHandleFUCHSIA(device, pImportSemaphoreZirconHandleInfo); } #endif #if (defined(VK_KHR_external_fence_win32)) - VkResult getFenceWin32HandleKHR(const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) const noexcept { + VkResult getFenceWin32HandleKHR(const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, + HANDLE* pHandle) const noexcept + { return fp_vkGetFenceWin32HandleKHR(device, pGetWin32HandleInfo, pHandle); } #endif #if (defined(VK_KHR_external_fence_win32)) - VkResult importFenceWin32HandleKHR(const VkImportFenceWin32HandleInfoKHR* 
pImportFenceWin32HandleInfo) const noexcept { + VkResult + importFenceWin32HandleKHR(const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo) const noexcept + { return fp_vkImportFenceWin32HandleKHR(device, pImportFenceWin32HandleInfo); } #endif #if (defined(VK_KHR_external_fence_fd)) - VkResult getFenceFdKHR(const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd) const noexcept { + VkResult getFenceFdKHR(const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd) const noexcept + { return fp_vkGetFenceFdKHR(device, pGetFdInfo, pFd); } #endif #if (defined(VK_KHR_external_fence_fd)) - VkResult importFenceFdKHR(const VkImportFenceFdInfoKHR* pImportFenceFdInfo) const noexcept { + VkResult importFenceFdKHR(const VkImportFenceFdInfoKHR* pImportFenceFdInfo) const noexcept + { return fp_vkImportFenceFdKHR(device, pImportFenceFdInfo); } #endif #if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) - VkResult getFenceSciSyncFenceNV(const VkFenceGetSciSyncInfoNV* pGetSciSyncHandleInfo, void* pHandle) const noexcept { + VkResult getFenceSciSyncFenceNV(const VkFenceGetSciSyncInfoNV* pGetSciSyncHandleInfo, void* pHandle) const noexcept + { return fp_vkGetFenceSciSyncFenceNV(device, pGetSciSyncHandleInfo, pHandle); } #endif #if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) - VkResult getFenceSciSyncObjNV(const VkFenceGetSciSyncInfoNV* pGetSciSyncHandleInfo, void* pHandle) const noexcept { + VkResult getFenceSciSyncObjNV(const VkFenceGetSciSyncInfoNV* pGetSciSyncHandleInfo, void* pHandle) const noexcept + { return fp_vkGetFenceSciSyncObjNV(device, pGetSciSyncHandleInfo, pHandle); } #endif #if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) - VkResult importFenceSciSyncFenceNV(const VkImportFenceSciSyncInfoNV* pImportFenceSciSyncInfo) const noexcept { + VkResult importFenceSciSyncFenceNV(const VkImportFenceSciSyncInfoNV* pImportFenceSciSyncInfo) const noexcept + { return fp_vkImportFenceSciSyncFenceNV(device, pImportFenceSciSyncInfo); } #endif #if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) - VkResult importFenceSciSyncObjNV(const VkImportFenceSciSyncInfoNV* pImportFenceSciSyncInfo) const noexcept { + VkResult importFenceSciSyncObjNV(const VkImportFenceSciSyncInfoNV* pImportFenceSciSyncInfo) const noexcept + { return fp_vkImportFenceSciSyncObjNV(device, pImportFenceSciSyncInfo); } #endif #if (defined(VK_NV_external_sci_sync)) - VkResult getSemaphoreSciSyncObjNV(const VkSemaphoreGetSciSyncInfoNV* pGetSciSyncInfo, void* pHandle) const noexcept { + VkResult getSemaphoreSciSyncObjNV(const VkSemaphoreGetSciSyncInfoNV* pGetSciSyncInfo, void* pHandle) const noexcept + { return fp_vkGetSemaphoreSciSyncObjNV(device, pGetSciSyncInfo, pHandle); } #endif #if (defined(VK_NV_external_sci_sync)) - VkResult importSemaphoreSciSyncObjNV(const VkImportSemaphoreSciSyncInfoNV* pImportSemaphoreSciSyncInfo) const noexcept { + VkResult + importSemaphoreSciSyncObjNV(const VkImportSemaphoreSciSyncInfoNV* pImportSemaphoreSciSyncInfo) const noexcept + { return fp_vkImportSemaphoreSciSyncObjNV(device, pImportSemaphoreSciSyncInfo); } #endif #if (defined(VK_NV_external_sci_sync2)) - VkResult createSemaphoreSciSyncPoolNV(const VkSemaphoreSciSyncPoolCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphoreSciSyncPoolNV* pSemaphorePool) const noexcept { + VkResult createSemaphoreSciSyncPoolNV(const VkSemaphoreSciSyncPoolCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSemaphoreSciSyncPoolNV* 
pSemaphorePool) const noexcept + { return fp_vkCreateSemaphoreSciSyncPoolNV(device, pCreateInfo, pAllocator, pSemaphorePool); } #endif #if (defined(VK_NV_external_sci_sync2)) - void destroySemaphoreSciSyncPoolNV(VkSemaphoreSciSyncPoolNV semaphorePool, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroySemaphoreSciSyncPoolNV(VkSemaphoreSciSyncPoolNV semaphorePool, + const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroySemaphoreSciSyncPoolNV(device, semaphorePool, pAllocator); } #endif #if (defined(VK_EXT_display_control)) - VkResult displayPowerControlEXT(VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo) const noexcept { + VkResult displayPowerControlEXT(VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo) const noexcept + { return fp_vkDisplayPowerControlEXT(device, display, pDisplayPowerInfo); } #endif #if (defined(VK_EXT_display_control)) - VkResult registerDeviceEventEXT(const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) const noexcept { + VkResult registerDeviceEventEXT(const VkDeviceEventInfoEXT* pDeviceEventInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence) const noexcept + { return fp_vkRegisterDeviceEventEXT(device, pDeviceEventInfo, pAllocator, pFence); } #endif #if (defined(VK_EXT_display_control)) - VkResult registerDisplayEventEXT(VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) const noexcept { + VkResult registerDisplayEventEXT(VkDisplayKHR display, + const VkDisplayEventInfoEXT* pDisplayEventInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence) const noexcept + { return fp_vkRegisterDisplayEventEXT(device, display, pDisplayEventInfo, pAllocator, pFence); } #endif #if (defined(VK_EXT_display_control)) - VkResult getSwapchainCounterEXT(VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue) const noexcept { + VkResult getSwapchainCounterEXT(VkSwapchainKHR swapchain, + VkSurfaceCounterFlagBitsEXT counter, + uint64_t* pCounterValue) const noexcept + { return fp_vkGetSwapchainCounterEXT(device, swapchain, counter, pCounterValue); } #endif #if (defined(VK_VERSION_1_1)) - void getDeviceGroupPeerMemoryFeatures(uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlagsKHR* pPeerMemoryFeatures) const noexcept { - fp_vkGetDeviceGroupPeerMemoryFeatures(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures); + void getDeviceGroupPeerMemoryFeatures(uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlagsKHR* pPeerMemoryFeatures) const noexcept + { + fp_vkGetDeviceGroupPeerMemoryFeatures( + device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures); } #endif #if (defined(VK_VERSION_1_1)) - VkResult bindBufferMemory2(uint32_t bindInfoCount, const VkBindBufferMemoryInfoKHR* pBindInfos) const noexcept { + VkResult bindBufferMemory2(uint32_t bindInfoCount, const VkBindBufferMemoryInfoKHR* pBindInfos) const noexcept + { return fp_vkBindBufferMemory2(device, bindInfoCount, pBindInfos); } #endif #if (defined(VK_VERSION_1_1)) - VkResult bindImageMemory2(uint32_t bindInfoCount, const VkBindImageMemoryInfoKHR* pBindInfos) const noexcept { + VkResult bindImageMemory2(uint32_t bindInfoCount, const VkBindImageMemoryInfoKHR* pBindInfos) const noexcept + { return fp_vkBindImageMemory2(device, bindInfoCount, 
pBindInfos); } #endif #if (defined(VK_VERSION_1_1)) - void cmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask) const noexcept { + void cmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask) const noexcept + { fp_vkCmdSetDeviceMask(commandBuffer, deviceMask); } #endif #if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group)) - VkResult getDeviceGroupPresentCapabilitiesKHR(VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities) const noexcept { + VkResult getDeviceGroupPresentCapabilitiesKHR( + VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities) const noexcept + { return fp_vkGetDeviceGroupPresentCapabilitiesKHR(device, pDeviceGroupPresentCapabilities); } #endif #if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group)) - VkResult getDeviceGroupSurfacePresentModesKHR(VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes) const noexcept { + VkResult getDeviceGroupSurfacePresentModesKHR(VkSurfaceKHR surface, + VkDeviceGroupPresentModeFlagsKHR* pModes) const noexcept + { return fp_vkGetDeviceGroupSurfacePresentModesKHR(device, surface, pModes); } #endif #if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group)) - VkResult acquireNextImage2KHR(const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex) const noexcept { + VkResult acquireNextImage2KHR(const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex) const noexcept + { return fp_vkAcquireNextImage2KHR(device, pAcquireInfo, pImageIndex); } #endif #if (defined(VK_VERSION_1_1)) - void cmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) const noexcept { + void cmdDispatchBase(VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ) const noexcept + { fp_vkCmdDispatchBase(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ); } #endif #if (defined(VK_VERSION_1_1)) - VkResult createDescriptorUpdateTemplate(const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate) const noexcept { + VkResult createDescriptorUpdateTemplate(const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate) const noexcept + { return fp_vkCreateDescriptorUpdateTemplate(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate); } #endif #if (defined(VK_VERSION_1_1)) - void destroyDescriptorUpdateTemplate(VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyDescriptorUpdateTemplate(VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, + const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator); } #endif #if (defined(VK_VERSION_1_1)) - void updateDescriptorSetWithTemplate(VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void* pData) const noexcept { + void updateDescriptorSetWithTemplate(VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, + const void* pData) const noexcept + { fp_vkUpdateDescriptorSetWithTemplate(device, descriptorSet, 
descriptorUpdateTemplate, pData); } #endif #if (defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_descriptor_update_template)) - void cmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData) const noexcept { + void cmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void* pData) const noexcept + { fp_vkCmdPushDescriptorSetWithTemplateKHR(commandBuffer, descriptorUpdateTemplate, layout, set, pData); } #endif #if (defined(VK_EXT_hdr_metadata)) - void setHdrMetadataEXT(uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata) const noexcept { + void setHdrMetadataEXT(uint32_t swapchainCount, + const VkSwapchainKHR* pSwapchains, + const VkHdrMetadataEXT* pMetadata) const noexcept + { fp_vkSetHdrMetadataEXT(device, swapchainCount, pSwapchains, pMetadata); } #endif #if (defined(VK_KHR_shared_presentable_image)) - VkResult getSwapchainStatusKHR(VkSwapchainKHR swapchain) const noexcept { + VkResult getSwapchainStatusKHR(VkSwapchainKHR swapchain) const noexcept + { return fp_vkGetSwapchainStatusKHR(device, swapchain); } #endif #if (defined(VK_GOOGLE_display_timing)) - VkResult getRefreshCycleDurationGOOGLE(VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties) const noexcept { + VkResult getRefreshCycleDurationGOOGLE(VkSwapchainKHR swapchain, + VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties) const noexcept + { return fp_vkGetRefreshCycleDurationGOOGLE(device, swapchain, pDisplayTimingProperties); } #endif #if (defined(VK_GOOGLE_display_timing)) - VkResult getPastPresentationTimingGOOGLE(VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings) const noexcept { + VkResult getPastPresentationTimingGOOGLE(VkSwapchainKHR swapchain, + uint32_t* pPresentationTimingCount, + VkPastPresentationTimingGOOGLE* pPresentationTimings) const noexcept + { return fp_vkGetPastPresentationTimingGOOGLE(device, swapchain, pPresentationTimingCount, pPresentationTimings); } #endif #if (defined(VK_NV_clip_space_w_scaling)) - void cmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings) const noexcept { + void cmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportWScalingNV* pViewportWScalings) const noexcept + { fp_vkCmdSetViewportWScalingNV(commandBuffer, firstViewport, viewportCount, pViewportWScalings); } #endif #if (defined(VK_EXT_discard_rectangles)) - void cmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles) const noexcept { + void cmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, + uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VkRect2D* pDiscardRectangles) const noexcept + { fp_vkCmdSetDiscardRectangleEXT(commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles); } #endif #if ((defined(VK_EXT_discard_rectangles))) && VK_HEADER_VERSION >= 241 - void cmdSetDiscardRectangleEnableEXT(VkCommandBuffer commandBuffer, VkBool32 discardRectangleEnable) const noexcept { + void cmdSetDiscardRectangleEnableEXT(VkCommandBuffer 
commandBuffer, VkBool32 discardRectangleEnable) const noexcept + { fp_vkCmdSetDiscardRectangleEnableEXT(commandBuffer, discardRectangleEnable); } #endif #if ((defined(VK_EXT_discard_rectangles))) && VK_HEADER_VERSION >= 241 - void cmdSetDiscardRectangleModeEXT(VkCommandBuffer commandBuffer, VkDiscardRectangleModeEXT discardRectangleMode) const noexcept { + void cmdSetDiscardRectangleModeEXT(VkCommandBuffer commandBuffer, + VkDiscardRectangleModeEXT discardRectangleMode) const noexcept + { fp_vkCmdSetDiscardRectangleModeEXT(commandBuffer, discardRectangleMode); } #endif #if (defined(VK_EXT_sample_locations)) - void cmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo) const noexcept { + void cmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer, + const VkSampleLocationsInfoEXT* pSampleLocationsInfo) const noexcept + { fp_vkCmdSetSampleLocationsEXT(commandBuffer, pSampleLocationsInfo); } #endif #if (defined(VK_VERSION_1_1)) - void getBufferMemoryRequirements2(const VkBufferMemoryRequirementsInfo2KHR* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + void getBufferMemoryRequirements2(const VkBufferMemoryRequirementsInfo2KHR* pInfo, + VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept + { fp_vkGetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements); } #endif #if (defined(VK_VERSION_1_1)) - void getImageMemoryRequirements2(const VkImageMemoryRequirementsInfo2KHR* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + void getImageMemoryRequirements2(const VkImageMemoryRequirementsInfo2KHR* pInfo, + VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept + { fp_vkGetImageMemoryRequirements2(device, pInfo, pMemoryRequirements); } #endif #if (defined(VK_VERSION_1_1)) - void getImageSparseMemoryRequirements2(const VkImageSparseMemoryRequirementsInfo2KHR* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements) const noexcept { + void + getImageSparseMemoryRequirements2(const VkImageSparseMemoryRequirementsInfo2KHR* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements) const noexcept + { fp_vkGetImageSparseMemoryRequirements2(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements); } #endif #if (defined(VK_VERSION_1_3)) - void getDeviceBufferMemoryRequirements(const VkDeviceBufferMemoryRequirementsKHR* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + void getDeviceBufferMemoryRequirements(const VkDeviceBufferMemoryRequirementsKHR* pInfo, + VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept + { fp_vkGetDeviceBufferMemoryRequirements(device, pInfo, pMemoryRequirements); } #endif #if (defined(VK_VERSION_1_3)) - void getDeviceImageMemoryRequirements(const VkDeviceImageMemoryRequirementsKHR* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + void getDeviceImageMemoryRequirements(const VkDeviceImageMemoryRequirementsKHR* pInfo, + VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept + { fp_vkGetDeviceImageMemoryRequirements(device, pInfo, pMemoryRequirements); } #endif #if (defined(VK_VERSION_1_3)) - void getDeviceImageSparseMemoryRequirements(const VkDeviceImageMemoryRequirementsKHR* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements) const noexcept { - fp_vkGetDeviceImageSparseMemoryRequirements(device, pInfo, 
pSparseMemoryRequirementCount, pSparseMemoryRequirements); + void getDeviceImageSparseMemoryRequirements( + const VkDeviceImageMemoryRequirementsKHR* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements) const noexcept + { + fp_vkGetDeviceImageSparseMemoryRequirements( + device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements); } #endif #if (defined(VK_VERSION_1_1)) - VkResult createSamplerYcbcrConversion(const VkSamplerYcbcrConversionCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversionKHR* pYcbcrConversion) const noexcept { + VkResult createSamplerYcbcrConversion(const VkSamplerYcbcrConversionCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSamplerYcbcrConversionKHR* pYcbcrConversion) const noexcept + { return fp_vkCreateSamplerYcbcrConversion(device, pCreateInfo, pAllocator, pYcbcrConversion); } #endif #if (defined(VK_VERSION_1_1)) - void destroySamplerYcbcrConversion(VkSamplerYcbcrConversionKHR ycbcrConversion, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroySamplerYcbcrConversion(VkSamplerYcbcrConversionKHR ycbcrConversion, + const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator); } #endif #if (defined(VK_VERSION_1_1)) - void getDeviceQueue2(const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue) const noexcept { + void getDeviceQueue2(const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue) const noexcept + { fp_vkGetDeviceQueue2(device, pQueueInfo, pQueue); } #endif #if (defined(VK_EXT_validation_cache)) - VkResult createValidationCacheEXT(const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache) const noexcept { + VkResult createValidationCacheEXT(const VkValidationCacheCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkValidationCacheEXT* pValidationCache) const noexcept + { return fp_vkCreateValidationCacheEXT(device, pCreateInfo, pAllocator, pValidationCache); } #endif #if (defined(VK_EXT_validation_cache)) - void destroyValidationCacheEXT(VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyValidationCacheEXT(VkValidationCacheEXT validationCache, + const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyValidationCacheEXT(device, validationCache, pAllocator); } #endif #if (defined(VK_EXT_validation_cache)) - VkResult getValidationCacheDataEXT(VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData) const noexcept { + VkResult + getValidationCacheDataEXT(VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData) const noexcept + { return fp_vkGetValidationCacheDataEXT(device, validationCache, pDataSize, pData); } #endif #if (defined(VK_EXT_validation_cache)) - VkResult mergeValidationCachesEXT(VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches) const noexcept { + VkResult mergeValidationCachesEXT(VkValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VkValidationCacheEXT* pSrcCaches) const noexcept + { return fp_vkMergeValidationCachesEXT(device, dstCache, srcCacheCount, pSrcCaches); } #endif #if (defined(VK_VERSION_1_1)) - void getDescriptorSetLayoutSupport(const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupportKHR* pSupport) const noexcept { + void 
getDescriptorSetLayoutSupport(const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + VkDescriptorSetLayoutSupportKHR* pSupport) const noexcept + { fp_vkGetDescriptorSetLayoutSupport(device, pCreateInfo, pSupport); } #endif #if (defined(VK_ANDROID_native_buffer)) - VkResult getSwapchainGrallocUsageANDROID(VkFormat format, VkImageUsageFlags imageUsage, int* grallocUsage) const noexcept { + VkResult + getSwapchainGrallocUsageANDROID(VkFormat format, VkImageUsageFlags imageUsage, int* grallocUsage) const noexcept + { return fp_vkGetSwapchainGrallocUsageANDROID(device, format, imageUsage, grallocUsage); } #endif #if (defined(VK_ANDROID_native_buffer)) - VkResult getSwapchainGrallocUsage2ANDROID(VkFormat format, VkImageUsageFlags imageUsage, VkSwapchainImageUsageFlagsANDROID swapchainImageUsage, uint64_t* grallocConsumerUsage, uint64_t* grallocProducerUsage) const noexcept { - return fp_vkGetSwapchainGrallocUsage2ANDROID(device, format, imageUsage, swapchainImageUsage, grallocConsumerUsage, grallocProducerUsage); + VkResult getSwapchainGrallocUsage2ANDROID(VkFormat format, + VkImageUsageFlags imageUsage, + VkSwapchainImageUsageFlagsANDROID swapchainImageUsage, + uint64_t* grallocConsumerUsage, + uint64_t* grallocProducerUsage) const noexcept + { + return fp_vkGetSwapchainGrallocUsage2ANDROID( + device, format, imageUsage, swapchainImageUsage, grallocConsumerUsage, grallocProducerUsage); } #endif #if (defined(VK_ANDROID_native_buffer)) - VkResult acquireImageANDROID(VkImage image, int nativeFenceFd, VkSemaphore semaphore, VkFence fence) const noexcept { + VkResult acquireImageANDROID(VkImage image, int nativeFenceFd, VkSemaphore semaphore, VkFence fence) const noexcept + { return fp_vkAcquireImageANDROID(device, image, nativeFenceFd, semaphore, fence); } #endif #if (defined(VK_ANDROID_native_buffer)) - VkResult queueSignalReleaseImageANDROID(VkQueue queue, uint32_t waitSemaphoreCount, const VkSemaphore* pWaitSemaphores, VkImage image, int* pNativeFenceFd) const noexcept { + VkResult queueSignalReleaseImageANDROID(VkQueue queue, + uint32_t waitSemaphoreCount, + const VkSemaphore* pWaitSemaphores, + VkImage image, + int* pNativeFenceFd) const noexcept + { return fp_vkQueueSignalReleaseImageANDROID(queue, waitSemaphoreCount, pWaitSemaphores, image, pNativeFenceFd); } #endif #if (defined(VK_AMD_shader_info)) - VkResult getShaderInfoAMD(VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo) const noexcept { + VkResult getShaderInfoAMD(VkPipeline pipeline, + VkShaderStageFlagBits shaderStage, + VkShaderInfoTypeAMD infoType, + size_t* pInfoSize, + void* pInfo) const noexcept + { return fp_vkGetShaderInfoAMD(device, pipeline, shaderStage, infoType, pInfoSize, pInfo); } #endif #if (defined(VK_AMD_display_native_hdr)) - void setLocalDimmingAMD(VkSwapchainKHR swapChain, VkBool32 localDimmingEnable) const noexcept { + void setLocalDimmingAMD(VkSwapchainKHR swapChain, VkBool32 localDimmingEnable) const noexcept + { fp_vkSetLocalDimmingAMD(device, swapChain, localDimmingEnable); } #endif #if (defined(VK_KHR_calibrated_timestamps)) - VkResult getCalibratedTimestampsKHR(uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation) const noexcept { + VkResult getCalibratedTimestampsKHR(uint32_t timestampCount, + const VkCalibratedTimestampInfoEXT* pTimestampInfos, + uint64_t* pTimestamps, + uint64_t* pMaxDeviation) const noexcept + { return fp_vkGetCalibratedTimestampsKHR(device, 
timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation); } #endif #if (defined(VK_EXT_debug_utils)) - VkResult setDebugUtilsObjectNameEXT(const VkDebugUtilsObjectNameInfoEXT* pNameInfo) const noexcept { + VkResult setDebugUtilsObjectNameEXT(const VkDebugUtilsObjectNameInfoEXT* pNameInfo) const noexcept + { return fp_vkSetDebugUtilsObjectNameEXT(device, pNameInfo); } #endif #if (defined(VK_EXT_debug_utils)) - VkResult setDebugUtilsObjectTagEXT(const VkDebugUtilsObjectTagInfoEXT* pTagInfo) const noexcept { + VkResult setDebugUtilsObjectTagEXT(const VkDebugUtilsObjectTagInfoEXT* pTagInfo) const noexcept + { return fp_vkSetDebugUtilsObjectTagEXT(device, pTagInfo); } #endif #if (defined(VK_EXT_debug_utils)) - void queueBeginDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo) const noexcept { + void queueBeginDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo) const noexcept + { fp_vkQueueBeginDebugUtilsLabelEXT(queue, pLabelInfo); } #endif #if (defined(VK_EXT_debug_utils)) - void queueEndDebugUtilsLabelEXT(VkQueue queue) const noexcept { - fp_vkQueueEndDebugUtilsLabelEXT(queue); - } + void queueEndDebugUtilsLabelEXT(VkQueue queue) const noexcept { fp_vkQueueEndDebugUtilsLabelEXT(queue); } #endif #if (defined(VK_EXT_debug_utils)) - void queueInsertDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo) const noexcept { + void queueInsertDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo) const noexcept + { fp_vkQueueInsertDebugUtilsLabelEXT(queue, pLabelInfo); } #endif #if (defined(VK_EXT_debug_utils)) - void cmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo) const noexcept { + void cmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT* pLabelInfo) const noexcept + { fp_vkCmdBeginDebugUtilsLabelEXT(commandBuffer, pLabelInfo); } #endif #if (defined(VK_EXT_debug_utils)) - void cmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) const noexcept { + void cmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) const noexcept + { fp_vkCmdEndDebugUtilsLabelEXT(commandBuffer); } #endif #if (defined(VK_EXT_debug_utils)) - void cmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo) const noexcept { + void cmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT* pLabelInfo) const noexcept + { fp_vkCmdInsertDebugUtilsLabelEXT(commandBuffer, pLabelInfo); } #endif #if (defined(VK_EXT_external_memory_host)) - VkResult getMemoryHostPointerPropertiesEXT(VkExternalMemoryHandleTypeFlagBitsKHR handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties) const noexcept { + VkResult + getMemoryHostPointerPropertiesEXT(VkExternalMemoryHandleTypeFlagBitsKHR handleType, + const void* pHostPointer, + VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties) const noexcept + { return fp_vkGetMemoryHostPointerPropertiesEXT(device, handleType, pHostPointer, pMemoryHostPointerProperties); } #endif #if (defined(VK_AMD_buffer_marker)) - void cmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) const noexcept { + void cmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker) const noexcept + { 
fp_vkCmdWriteBufferMarkerAMD(commandBuffer, pipelineStage, dstBuffer, dstOffset, marker); } #endif #if (defined(VK_VERSION_1_2)) - VkResult createRenderPass2(const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) const noexcept { + VkResult createRenderPass2(const VkRenderPassCreateInfo2KHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkRenderPass* pRenderPass) const noexcept + { return fp_vkCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass); } #endif #if (defined(VK_VERSION_1_2)) - void cmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfoKHR* pSubpassBeginInfo) const noexcept { + void cmdBeginRenderPass2(VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo* pRenderPassBegin, + const VkSubpassBeginInfoKHR* pSubpassBeginInfo) const noexcept + { fp_vkCmdBeginRenderPass2(commandBuffer, pRenderPassBegin, pSubpassBeginInfo); } #endif #if (defined(VK_VERSION_1_2)) - void cmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR* pSubpassBeginInfo, const VkSubpassEndInfoKHR* pSubpassEndInfo) const noexcept { + void cmdNextSubpass2(VkCommandBuffer commandBuffer, + const VkSubpassBeginInfoKHR* pSubpassBeginInfo, + const VkSubpassEndInfoKHR* pSubpassEndInfo) const noexcept + { fp_vkCmdNextSubpass2(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo); } #endif #if (defined(VK_VERSION_1_2)) - void cmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo) const noexcept { + void cmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo) const noexcept + { fp_vkCmdEndRenderPass2(commandBuffer, pSubpassEndInfo); } #endif #if (defined(VK_VERSION_1_2)) - VkResult getSemaphoreCounterValue(VkSemaphore semaphore, uint64_t* pValue) const noexcept { + VkResult getSemaphoreCounterValue(VkSemaphore semaphore, uint64_t* pValue) const noexcept + { return fp_vkGetSemaphoreCounterValue(device, semaphore, pValue); } #endif #if (defined(VK_VERSION_1_2)) - VkResult waitSemaphores(const VkSemaphoreWaitInfoKHR* pWaitInfo, uint64_t timeout) const noexcept { + VkResult waitSemaphores(const VkSemaphoreWaitInfoKHR* pWaitInfo, uint64_t timeout) const noexcept + { return fp_vkWaitSemaphores(device, pWaitInfo, timeout); } #endif #if (defined(VK_VERSION_1_2)) - VkResult signalSemaphore(const VkSemaphoreSignalInfoKHR* pSignalInfo) const noexcept { + VkResult signalSemaphore(const VkSemaphoreSignalInfoKHR* pSignalInfo) const noexcept + { return fp_vkSignalSemaphore(device, pSignalInfo); } #endif #if (defined(VK_ANDROID_external_memory_android_hardware_buffer)) - VkResult getAndroidHardwareBufferPropertiesANDROID(const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties) const noexcept { + VkResult + getAndroidHardwareBufferPropertiesANDROID(const struct AHardwareBuffer* buffer, + VkAndroidHardwareBufferPropertiesANDROID* pProperties) const noexcept + { return fp_vkGetAndroidHardwareBufferPropertiesANDROID(device, buffer, pProperties); } #endif #if (defined(VK_ANDROID_external_memory_android_hardware_buffer)) - VkResult getMemoryAndroidHardwareBufferANDROID(const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer) const noexcept { + VkResult getMemoryAndroidHardwareBufferANDROID(const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, + struct AHardwareBuffer** pBuffer) const noexcept + { return 
fp_vkGetMemoryAndroidHardwareBufferANDROID(device, pInfo, pBuffer); } #endif #if (defined(VK_VERSION_1_2)) - void cmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) const noexcept { + void cmdDrawIndirectCount(VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride) const noexcept + { fp_vkCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); } #endif #if (defined(VK_VERSION_1_2)) - void cmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) const noexcept { - fp_vkCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); + void cmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride) const noexcept + { + fp_vkCmdDrawIndexedIndirectCount( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); } #endif #if (defined(VK_NV_device_diagnostic_checkpoints)) - void cmdSetCheckpointNV(VkCommandBuffer commandBuffer, const void* pCheckpointMarker) const noexcept { + void cmdSetCheckpointNV(VkCommandBuffer commandBuffer, const void* pCheckpointMarker) const noexcept + { fp_vkCmdSetCheckpointNV(commandBuffer, pCheckpointMarker); } #endif #if (defined(VK_NV_device_diagnostic_checkpoints)) - void getQueueCheckpointDataNV(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData) const noexcept { + void getQueueCheckpointDataNV(VkQueue queue, + uint32_t* pCheckpointDataCount, + VkCheckpointDataNV* pCheckpointData) const noexcept + { fp_vkGetQueueCheckpointDataNV(queue, pCheckpointDataCount, pCheckpointData); } #endif #if (defined(VK_EXT_transform_feedback)) - void cmdBindTransformFeedbackBuffersEXT(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes) const noexcept { + void cmdBindTransformFeedbackBuffersEXT(VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets, + const VkDeviceSize* pSizes) const noexcept + { fp_vkCmdBindTransformFeedbackBuffersEXT(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes); } #endif #if (defined(VK_EXT_transform_feedback)) - void cmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) const noexcept { - fp_vkCmdBeginTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets); + void cmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer* pCounterBuffers, + const VkDeviceSize* pCounterBufferOffsets) const noexcept + { + fp_vkCmdBeginTransformFeedbackEXT( + commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets); } #endif #if (defined(VK_EXT_transform_feedback)) - void 
cmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) const noexcept { - fp_vkCmdEndTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets); + void cmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer* pCounterBuffers, + const VkDeviceSize* pCounterBufferOffsets) const noexcept + { + fp_vkCmdEndTransformFeedbackEXT( + commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets); } #endif #if (defined(VK_EXT_transform_feedback)) - void cmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index) const noexcept { + void cmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags, + uint32_t index) const noexcept + { fp_vkCmdBeginQueryIndexedEXT(commandBuffer, queryPool, query, flags, index); } #endif #if (defined(VK_EXT_transform_feedback)) - void cmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index) const noexcept { + void cmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + uint32_t index) const noexcept + { fp_vkCmdEndQueryIndexedEXT(commandBuffer, queryPool, query, index); } #endif #if (defined(VK_EXT_transform_feedback)) - void cmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride) const noexcept { - fp_vkCmdDrawIndirectByteCountEXT(commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride); + void cmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer, + uint32_t instanceCount, + uint32_t firstInstance, + VkBuffer counterBuffer, + VkDeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride) const noexcept + { + fp_vkCmdDrawIndirectByteCountEXT(commandBuffer, + instanceCount, + firstInstance, + counterBuffer, + counterBufferOffset, + counterOffset, + vertexStride); } #endif #if (defined(VK_NV_scissor_exclusive)) - void cmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors) const noexcept { + void cmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkRect2D* pExclusiveScissors) const noexcept + { fp_vkCmdSetExclusiveScissorNV(commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors); } #endif #if ((defined(VK_NV_scissor_exclusive))) && VK_HEADER_VERSION >= 241 - void cmdSetExclusiveScissorEnableNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkBool32* pExclusiveScissorEnables) const noexcept { - fp_vkCmdSetExclusiveScissorEnableNV(commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissorEnables); + void cmdSetExclusiveScissorEnableNV(VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkBool32* pExclusiveScissorEnables) const noexcept + { + fp_vkCmdSetExclusiveScissorEnableNV( + 
commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissorEnables); } #endif #if (defined(VK_NV_shading_rate_image)) - void cmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout) const noexcept { + void cmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, + VkImageView imageView, + VkImageLayout imageLayout) const noexcept + { fp_vkCmdBindShadingRateImageNV(commandBuffer, imageView, imageLayout); } #endif #if (defined(VK_NV_shading_rate_image)) - void cmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes) const noexcept { + void cmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkShadingRatePaletteNV* pShadingRatePalettes) const noexcept + { fp_vkCmdSetViewportShadingRatePaletteNV(commandBuffer, firstViewport, viewportCount, pShadingRatePalettes); } #endif #if (defined(VK_NV_shading_rate_image)) - void cmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders) const noexcept { + void cmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer, + VkCoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VkCoarseSampleOrderCustomNV* pCustomSampleOrders) const noexcept + { fp_vkCmdSetCoarseSampleOrderNV(commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders); } #endif #if (defined(VK_NV_mesh_shader)) - void cmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask) const noexcept { + void cmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask) const noexcept + { fp_vkCmdDrawMeshTasksNV(commandBuffer, taskCount, firstTask); } #endif #if (defined(VK_NV_mesh_shader)) - void cmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) const noexcept { + void cmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride) const noexcept + { fp_vkCmdDrawMeshTasksIndirectNV(commandBuffer, buffer, offset, drawCount, stride); } #endif #if (defined(VK_NV_mesh_shader)) - void cmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) const noexcept { - fp_vkCmdDrawMeshTasksIndirectCountNV(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); + void cmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride) const noexcept + { + fp_vkCmdDrawMeshTasksIndirectCountNV( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); } #endif #if (defined(VK_EXT_mesh_shader)) - void cmdDrawMeshTasksEXT(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) const noexcept { + void cmdDrawMeshTasksEXT(VkCommandBuffer commandBuffer, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ) const noexcept + { fp_vkCmdDrawMeshTasksEXT(commandBuffer, groupCountX, groupCountY, 
groupCountZ); } #endif #if (defined(VK_EXT_mesh_shader)) - void cmdDrawMeshTasksIndirectEXT(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) const noexcept { + void cmdDrawMeshTasksIndirectEXT(VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride) const noexcept + { fp_vkCmdDrawMeshTasksIndirectEXT(commandBuffer, buffer, offset, drawCount, stride); } #endif #if (defined(VK_EXT_mesh_shader)) - void cmdDrawMeshTasksIndirectCountEXT(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) const noexcept { - fp_vkCmdDrawMeshTasksIndirectCountEXT(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); + void cmdDrawMeshTasksIndirectCountEXT(VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride) const noexcept + { + fp_vkCmdDrawMeshTasksIndirectCountEXT( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); } #endif #if (defined(VK_NV_ray_tracing)) - VkResult compileDeferredNV(VkPipeline pipeline, uint32_t shader) const noexcept { + VkResult compileDeferredNV(VkPipeline pipeline, uint32_t shader) const noexcept + { return fp_vkCompileDeferredNV(device, pipeline, shader); } #endif #if (defined(VK_NV_ray_tracing)) - VkResult createAccelerationStructureNV(const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure) const noexcept { + VkResult createAccelerationStructureNV(const VkAccelerationStructureCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkAccelerationStructureNV* pAccelerationStructure) const noexcept + { return fp_vkCreateAccelerationStructureNV(device, pCreateInfo, pAllocator, pAccelerationStructure); } #endif #if (defined(VK_HUAWEI_invocation_mask)) - void cmdBindInvocationMaskHUAWEI(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout) const noexcept { + void cmdBindInvocationMaskHUAWEI(VkCommandBuffer commandBuffer, + VkImageView imageView, + VkImageLayout imageLayout) const noexcept + { fp_vkCmdBindInvocationMaskHUAWEI(commandBuffer, imageView, imageLayout); } #endif #if (defined(VK_KHR_acceleration_structure)) - void destroyAccelerationStructureKHR(VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyAccelerationStructureKHR(VkAccelerationStructureKHR accelerationStructure, + const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyAccelerationStructureKHR(device, accelerationStructure, pAllocator); } #endif #if (defined(VK_NV_ray_tracing)) - void destroyAccelerationStructureNV(VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyAccelerationStructureNV(VkAccelerationStructureNV accelerationStructure, + const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyAccelerationStructureNV(device, accelerationStructure, pAllocator); } #endif #if (defined(VK_NV_ray_tracing)) - void getAccelerationStructureMemoryRequirementsNV(const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + void 
getAccelerationStructureMemoryRequirementsNV(const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, + VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept + { fp_vkGetAccelerationStructureMemoryRequirementsNV(device, pInfo, pMemoryRequirements); } #endif #if (defined(VK_NV_ray_tracing)) - VkResult bindAccelerationStructureMemoryNV(uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) const noexcept { + VkResult bindAccelerationStructureMemoryNV(uint32_t bindInfoCount, + const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) const noexcept + { return fp_vkBindAccelerationStructureMemoryNV(device, bindInfoCount, pBindInfos); } #endif #if (defined(VK_NV_ray_tracing)) - void cmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode) const noexcept { + void cmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkCopyAccelerationStructureModeNV mode) const noexcept + { fp_vkCmdCopyAccelerationStructureNV(commandBuffer, dst, src, mode); } #endif #if (defined(VK_KHR_acceleration_structure)) - void cmdCopyAccelerationStructureKHR(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo) const noexcept { + void cmdCopyAccelerationStructureKHR(VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureInfoKHR* pInfo) const noexcept + { fp_vkCmdCopyAccelerationStructureKHR(commandBuffer, pInfo); } #endif #if (defined(VK_KHR_acceleration_structure)) - VkResult copyAccelerationStructureKHR(VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureInfoKHR* pInfo) const noexcept { + VkResult copyAccelerationStructureKHR(VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureInfoKHR* pInfo) const noexcept + { return fp_vkCopyAccelerationStructureKHR(device, deferredOperation, pInfo); } #endif #if (defined(VK_KHR_acceleration_structure)) - void cmdCopyAccelerationStructureToMemoryKHR(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo) const noexcept { + void cmdCopyAccelerationStructureToMemoryKHR(VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo) const noexcept + { fp_vkCmdCopyAccelerationStructureToMemoryKHR(commandBuffer, pInfo); } #endif #if (defined(VK_KHR_acceleration_structure)) - VkResult copyAccelerationStructureToMemoryKHR(VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo) const noexcept { + VkResult + copyAccelerationStructureToMemoryKHR(VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo) const noexcept + { return fp_vkCopyAccelerationStructureToMemoryKHR(device, deferredOperation, pInfo); } #endif #if (defined(VK_KHR_acceleration_structure)) - void cmdCopyMemoryToAccelerationStructureKHR(VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo) const noexcept { + void cmdCopyMemoryToAccelerationStructureKHR(VkCommandBuffer commandBuffer, + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo) const noexcept + { fp_vkCmdCopyMemoryToAccelerationStructureKHR(commandBuffer, pInfo); } #endif #if (defined(VK_KHR_acceleration_structure)) - VkResult copyMemoryToAccelerationStructureKHR(VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo) const noexcept { + VkResult 
+ copyMemoryToAccelerationStructureKHR(VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo) const noexcept + { return fp_vkCopyMemoryToAccelerationStructureKHR(device, deferredOperation, pInfo); } #endif #if (defined(VK_KHR_acceleration_structure)) - void cmdWriteAccelerationStructuresPropertiesKHR(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) const noexcept { - fp_vkCmdWriteAccelerationStructuresPropertiesKHR(commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery); + void cmdWriteAccelerationStructuresPropertiesKHR(VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR* pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery) const noexcept + { + fp_vkCmdWriteAccelerationStructuresPropertiesKHR( + commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery); } #endif #if (defined(VK_NV_ray_tracing)) - void cmdWriteAccelerationStructuresPropertiesNV(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) const noexcept { - fp_vkCmdWriteAccelerationStructuresPropertiesNV(commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery); + void cmdWriteAccelerationStructuresPropertiesNV(VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureNV* pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery) const noexcept + { + fp_vkCmdWriteAccelerationStructuresPropertiesNV( + commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery); } #endif #if (defined(VK_NV_ray_tracing)) - void cmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) const noexcept { - fp_vkCmdBuildAccelerationStructureNV(commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset); + void cmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer, + const VkAccelerationStructureInfoNV* pInfo, + VkBuffer instanceData, + VkDeviceSize instanceOffset, + VkBool32 update, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkBuffer scratch, + VkDeviceSize scratchOffset) const noexcept + { + fp_vkCmdBuildAccelerationStructureNV( + commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset); } #endif #if (defined(VK_KHR_acceleration_structure)) - VkResult writeAccelerationStructuresPropertiesKHR(uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, size_t dataSize, void* pData, size_t stride) const noexcept { - return fp_vkWriteAccelerationStructuresPropertiesKHR(device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride); + VkResult writeAccelerationStructuresPropertiesKHR(uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR* 
pAccelerationStructures, + VkQueryType queryType, + size_t dataSize, + void* pData, + size_t stride) const noexcept + { + return fp_vkWriteAccelerationStructuresPropertiesKHR( + device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride); } #endif #if (defined(VK_KHR_ray_tracing_pipeline)) - void cmdTraceRaysKHR(VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth) const noexcept { - fp_vkCmdTraceRaysKHR(commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth); + void cmdTraceRaysKHR(VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth) const noexcept + { + fp_vkCmdTraceRaysKHR(commandBuffer, + pRaygenShaderBindingTable, + pMissShaderBindingTable, + pHitShaderBindingTable, + pCallableShaderBindingTable, + width, + height, + depth); } #endif #if (defined(VK_NV_ray_tracing)) - void cmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth) const noexcept { - fp_vkCmdTraceRaysNV(commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth); + void cmdTraceRaysNV(VkCommandBuffer commandBuffer, + VkBuffer raygenShaderBindingTableBuffer, + VkDeviceSize raygenShaderBindingOffset, + VkBuffer missShaderBindingTableBuffer, + VkDeviceSize missShaderBindingOffset, + VkDeviceSize missShaderBindingStride, + VkBuffer hitShaderBindingTableBuffer, + VkDeviceSize hitShaderBindingOffset, + VkDeviceSize hitShaderBindingStride, + VkBuffer callableShaderBindingTableBuffer, + VkDeviceSize callableShaderBindingOffset, + VkDeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth) const noexcept + { + fp_vkCmdTraceRaysNV(commandBuffer, + raygenShaderBindingTableBuffer, + raygenShaderBindingOffset, + missShaderBindingTableBuffer, + missShaderBindingOffset, + missShaderBindingStride, + hitShaderBindingTableBuffer, + hitShaderBindingOffset, + hitShaderBindingStride, + callableShaderBindingTableBuffer, + callableShaderBindingOffset, + callableShaderBindingStride, + width, + height, + depth); } #endif #if (defined(VK_KHR_ray_tracing_pipeline)) - VkResult getRayTracingShaderGroupHandlesKHR(VkPipeline pipeline, uint32_t firstGroup, uint32_t 
groupCount, size_t dataSize, void* pData) const noexcept { + VkResult getRayTracingShaderGroupHandlesKHR( + VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData) const noexcept + { return fp_vkGetRayTracingShaderGroupHandlesKHR(device, pipeline, firstGroup, groupCount, dataSize, pData); } #endif #if (defined(VK_KHR_ray_tracing_pipeline)) - VkResult getRayTracingCaptureReplayShaderGroupHandlesKHR(VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData) const noexcept { - return fp_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(device, pipeline, firstGroup, groupCount, dataSize, pData); + VkResult getRayTracingCaptureReplayShaderGroupHandlesKHR( + VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData) const noexcept + { + return fp_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + device, pipeline, firstGroup, groupCount, dataSize, pData); } #endif #if (defined(VK_NV_ray_tracing)) - VkResult getAccelerationStructureHandleNV(VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData) const noexcept { + VkResult getAccelerationStructureHandleNV(VkAccelerationStructureNV accelerationStructure, + size_t dataSize, + void* pData) const noexcept + { return fp_vkGetAccelerationStructureHandleNV(device, accelerationStructure, dataSize, pData); } #endif #if (defined(VK_NV_ray_tracing)) - VkResult createRayTracingPipelinesNV(VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) const noexcept { - return fp_vkCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); + VkResult createRayTracingPipelinesNV(VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoNV* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines) const noexcept + { + return fp_vkCreateRayTracingPipelinesNV( + device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); } #endif #if (defined(VK_KHR_ray_tracing_pipeline)) - VkResult createRayTracingPipelinesKHR(VkDeferredOperationKHR deferredOperation, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) const noexcept { - return fp_vkCreateRayTracingPipelinesKHR(device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); + VkResult createRayTracingPipelinesKHR(VkDeferredOperationKHR deferredOperation, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines) const noexcept + { + return fp_vkCreateRayTracingPipelinesKHR( + device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); } #endif #if (defined(VK_KHR_ray_tracing_pipeline)) - void cmdTraceRaysIndirectKHR(VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, VkDeviceAddress indirectDeviceAddress) const noexcept { - fp_vkCmdTraceRaysIndirectKHR(commandBuffer, 
pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, indirectDeviceAddress); + void cmdTraceRaysIndirectKHR(VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, + VkDeviceAddress indirectDeviceAddress) const noexcept + { + fp_vkCmdTraceRaysIndirectKHR(commandBuffer, + pRaygenShaderBindingTable, + pMissShaderBindingTable, + pHitShaderBindingTable, + pCallableShaderBindingTable, + indirectDeviceAddress); } #endif #if (defined(VK_KHR_ray_tracing_maintenance1)) - void cmdTraceRaysIndirect2KHR(VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress) const noexcept { + void cmdTraceRaysIndirect2KHR(VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress) const noexcept + { fp_vkCmdTraceRaysIndirect2KHR(commandBuffer, indirectDeviceAddress); } #endif #if (defined(VK_KHR_acceleration_structure)) - void getDeviceAccelerationStructureCompatibilityKHR(const VkAccelerationStructureVersionInfoKHR* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility) const noexcept { + void getDeviceAccelerationStructureCompatibilityKHR( + const VkAccelerationStructureVersionInfoKHR* pVersionInfo, + VkAccelerationStructureCompatibilityKHR* pCompatibility) const noexcept + { fp_vkGetDeviceAccelerationStructureCompatibilityKHR(device, pVersionInfo, pCompatibility); } #endif #if (defined(VK_KHR_ray_tracing_pipeline)) - VkDeviceSize getRayTracingShaderGroupStackSizeKHR(VkPipeline pipeline, uint32_t group, VkShaderGroupShaderKHR groupShader) const noexcept { + VkDeviceSize getRayTracingShaderGroupStackSizeKHR(VkPipeline pipeline, + uint32_t group, + VkShaderGroupShaderKHR groupShader) const noexcept + { return fp_vkGetRayTracingShaderGroupStackSizeKHR(device, pipeline, group, groupShader); } #endif #if (defined(VK_KHR_ray_tracing_pipeline)) - void cmdSetRayTracingPipelineStackSizeKHR(VkCommandBuffer commandBuffer, uint32_t pipelineStackSize) const noexcept { + void cmdSetRayTracingPipelineStackSizeKHR(VkCommandBuffer commandBuffer, uint32_t pipelineStackSize) const noexcept + { fp_vkCmdSetRayTracingPipelineStackSizeKHR(commandBuffer, pipelineStackSize); } #endif #if (defined(VK_EXT_full_screen_exclusive)) - VkResult getDeviceGroupSurfacePresentModes2EXT(const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes) const noexcept { + VkResult getDeviceGroupSurfacePresentModes2EXT(const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + VkDeviceGroupPresentModeFlagsKHR* pModes) const noexcept + { return fp_vkGetDeviceGroupSurfacePresentModes2EXT(device, pSurfaceInfo, pModes); } #endif #if (defined(VK_EXT_full_screen_exclusive)) - VkResult acquireFullScreenExclusiveModeEXT(VkSwapchainKHR swapchain) const noexcept { + VkResult acquireFullScreenExclusiveModeEXT(VkSwapchainKHR swapchain) const noexcept + { return fp_vkAcquireFullScreenExclusiveModeEXT(device, swapchain); } #endif #if (defined(VK_EXT_full_screen_exclusive)) - VkResult releaseFullScreenExclusiveModeEXT(VkSwapchainKHR swapchain) const noexcept { + VkResult releaseFullScreenExclusiveModeEXT(VkSwapchainKHR swapchain) const noexcept + { return fp_vkReleaseFullScreenExclusiveModeEXT(device, swapchain); } #endif #if (defined(VK_KHR_performance_query)) - VkResult acquireProfilingLockKHR(const 
VkAcquireProfilingLockInfoKHR* pInfo) const noexcept { + VkResult acquireProfilingLockKHR(const VkAcquireProfilingLockInfoKHR* pInfo) const noexcept + { return fp_vkAcquireProfilingLockKHR(device, pInfo); } #endif #if (defined(VK_KHR_performance_query)) - void releaseProfilingLockKHR() const noexcept { - fp_vkReleaseProfilingLockKHR(device); - } + void releaseProfilingLockKHR() const noexcept { fp_vkReleaseProfilingLockKHR(device); } #endif #if (defined(VK_EXT_image_drm_format_modifier)) - VkResult getImageDrmFormatModifierPropertiesEXT(VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties) const noexcept { + VkResult getImageDrmFormatModifierPropertiesEXT(VkImage image, + VkImageDrmFormatModifierPropertiesEXT* pProperties) const noexcept + { return fp_vkGetImageDrmFormatModifierPropertiesEXT(device, image, pProperties); } #endif #if (defined(VK_VERSION_1_2)) - uint64_t getBufferOpaqueCaptureAddress(const VkBufferDeviceAddressInfoEXT* pInfo) const noexcept { + uint64_t getBufferOpaqueCaptureAddress(const VkBufferDeviceAddressInfoEXT* pInfo) const noexcept + { return fp_vkGetBufferOpaqueCaptureAddress(device, pInfo); } #endif #if (defined(VK_VERSION_1_2)) - VkDeviceAddress getBufferDeviceAddress(const VkBufferDeviceAddressInfoEXT* pInfo) const noexcept { + VkDeviceAddress getBufferDeviceAddress(const VkBufferDeviceAddressInfoEXT* pInfo) const noexcept + { return fp_vkGetBufferDeviceAddress(device, pInfo); } #endif #if (defined(VK_INTEL_performance_query)) - VkResult initializePerformanceApiINTEL(const VkInitializePerformanceApiInfoINTEL* pInitializeInfo) const noexcept { + VkResult initializePerformanceApiINTEL(const VkInitializePerformanceApiInfoINTEL* pInitializeInfo) const noexcept + { return fp_vkInitializePerformanceApiINTEL(device, pInitializeInfo); } #endif #if (defined(VK_INTEL_performance_query)) - void uninitializePerformanceApiINTEL() const noexcept { - fp_vkUninitializePerformanceApiINTEL(device); - } + void uninitializePerformanceApiINTEL() const noexcept { fp_vkUninitializePerformanceApiINTEL(device); } #endif #if (defined(VK_INTEL_performance_query)) - VkResult cmdSetPerformanceMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo) const noexcept { + VkResult cmdSetPerformanceMarkerINTEL(VkCommandBuffer commandBuffer, + const VkPerformanceMarkerInfoINTEL* pMarkerInfo) const noexcept + { return fp_vkCmdSetPerformanceMarkerINTEL(commandBuffer, pMarkerInfo); } #endif #if (defined(VK_INTEL_performance_query)) - VkResult cmdSetPerformanceStreamMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo) const noexcept { + VkResult cmdSetPerformanceStreamMarkerINTEL(VkCommandBuffer commandBuffer, + const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo) const noexcept + { return fp_vkCmdSetPerformanceStreamMarkerINTEL(commandBuffer, pMarkerInfo); } #endif #if (defined(VK_INTEL_performance_query)) - VkResult cmdSetPerformanceOverrideINTEL(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo) const noexcept { + VkResult cmdSetPerformanceOverrideINTEL(VkCommandBuffer commandBuffer, + const VkPerformanceOverrideInfoINTEL* pOverrideInfo) const noexcept + { return fp_vkCmdSetPerformanceOverrideINTEL(commandBuffer, pOverrideInfo); } #endif #if (defined(VK_INTEL_performance_query)) - VkResult acquirePerformanceConfigurationINTEL(const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration) const noexcept { + VkResult 
acquirePerformanceConfigurationINTEL(const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, + VkPerformanceConfigurationINTEL* pConfiguration) const noexcept + { return fp_vkAcquirePerformanceConfigurationINTEL(device, pAcquireInfo, pConfiguration); } #endif #if (defined(VK_INTEL_performance_query)) - VkResult releasePerformanceConfigurationINTEL(VkPerformanceConfigurationINTEL configuration) const noexcept { + VkResult releasePerformanceConfigurationINTEL(VkPerformanceConfigurationINTEL configuration) const noexcept + { return fp_vkReleasePerformanceConfigurationINTEL(device, configuration); } #endif #if (defined(VK_INTEL_performance_query)) - VkResult queueSetPerformanceConfigurationINTEL(VkQueue queue, VkPerformanceConfigurationINTEL configuration) const noexcept { + VkResult queueSetPerformanceConfigurationINTEL(VkQueue queue, + VkPerformanceConfigurationINTEL configuration) const noexcept + { return fp_vkQueueSetPerformanceConfigurationINTEL(queue, configuration); } #endif #if (defined(VK_INTEL_performance_query)) - VkResult getPerformanceParameterINTEL(VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue) const noexcept { + VkResult getPerformanceParameterINTEL(VkPerformanceParameterTypeINTEL parameter, + VkPerformanceValueINTEL* pValue) const noexcept + { return fp_vkGetPerformanceParameterINTEL(device, parameter, pValue); } #endif #if (defined(VK_VERSION_1_2)) - uint64_t getDeviceMemoryOpaqueCaptureAddress(const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo) const noexcept { + uint64_t getDeviceMemoryOpaqueCaptureAddress(const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo) const noexcept + { return fp_vkGetDeviceMemoryOpaqueCaptureAddress(device, pInfo); } #endif #if (defined(VK_KHR_pipeline_executable_properties)) - VkResult getPipelineExecutablePropertiesKHR(const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties) const noexcept { + VkResult getPipelineExecutablePropertiesKHR(const VkPipelineInfoKHR* pPipelineInfo, + uint32_t* pExecutableCount, + VkPipelineExecutablePropertiesKHR* pProperties) const noexcept + { return fp_vkGetPipelineExecutablePropertiesKHR(device, pPipelineInfo, pExecutableCount, pProperties); } #endif #if (defined(VK_KHR_pipeline_executable_properties)) - VkResult getPipelineExecutableStatisticsKHR(const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics) const noexcept { + VkResult getPipelineExecutableStatisticsKHR(const VkPipelineExecutableInfoKHR* pExecutableInfo, + uint32_t* pStatisticCount, + VkPipelineExecutableStatisticKHR* pStatistics) const noexcept + { return fp_vkGetPipelineExecutableStatisticsKHR(device, pExecutableInfo, pStatisticCount, pStatistics); } #endif #if (defined(VK_KHR_pipeline_executable_properties)) - VkResult getPipelineExecutableInternalRepresentationsKHR(const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) const noexcept { - return fp_vkGetPipelineExecutableInternalRepresentationsKHR(device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations); + VkResult getPipelineExecutableInternalRepresentationsKHR( + const VkPipelineExecutableInfoKHR* pExecutableInfo, + uint32_t* pInternalRepresentationCount, + VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) const noexcept + { + return 
fp_vkGetPipelineExecutableInternalRepresentationsKHR( + device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations); } #endif #if (defined(VK_KHR_line_rasterization)) - void cmdSetLineStippleKHR(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern) const noexcept { + void cmdSetLineStippleKHR(VkCommandBuffer commandBuffer, + uint32_t lineStippleFactor, + uint16_t lineStipplePattern) const noexcept + { fp_vkCmdSetLineStippleKHR(commandBuffer, lineStippleFactor, lineStipplePattern); } #endif #if (defined(VKSC_VERSION_1_0)) - VkResult getFaultData(VkFaultQueryBehavior faultQueryBehavior, VkBool32* pUnrecordedFaults, uint32_t* pFaultCount, VkFaultData* pFaults) const noexcept { + VkResult getFaultData(VkFaultQueryBehavior faultQueryBehavior, + VkBool32* pUnrecordedFaults, + uint32_t* pFaultCount, + VkFaultData* pFaults) const noexcept + { return fp_vkGetFaultData(device, faultQueryBehavior, pUnrecordedFaults, pFaultCount, pFaults); } #endif #if (defined(VK_KHR_acceleration_structure)) - VkResult createAccelerationStructureKHR(const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureKHR* pAccelerationStructure) const noexcept { + VkResult createAccelerationStructureKHR(const VkAccelerationStructureCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkAccelerationStructureKHR* pAccelerationStructure) const noexcept + { return fp_vkCreateAccelerationStructureKHR(device, pCreateInfo, pAllocator, pAccelerationStructure); } #endif #if (defined(VK_KHR_acceleration_structure)) - void cmdBuildAccelerationStructuresKHR(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos) const noexcept { + void cmdBuildAccelerationStructuresKHR( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos) const noexcept + { fp_vkCmdBuildAccelerationStructuresKHR(commandBuffer, infoCount, pInfos, ppBuildRangeInfos); } #endif #if (defined(VK_KHR_acceleration_structure)) - void cmdBuildAccelerationStructuresIndirectKHR(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkDeviceAddress* pIndirectDeviceAddresses, const uint32_t* pIndirectStrides, const uint32_t* const* ppMaxPrimitiveCounts) const noexcept { - fp_vkCmdBuildAccelerationStructuresIndirectKHR(commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts); + void cmdBuildAccelerationStructuresIndirectKHR(VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkDeviceAddress* pIndirectDeviceAddresses, + const uint32_t* pIndirectStrides, + const uint32_t* const* ppMaxPrimitiveCounts) const noexcept + { + fp_vkCmdBuildAccelerationStructuresIndirectKHR( + commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts); } #endif #if (defined(VK_KHR_acceleration_structure)) - VkResult buildAccelerationStructuresKHR(VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos) const noexcept { + VkResult buildAccelerationStructuresKHR( + 
VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos) const noexcept + { return fp_vkBuildAccelerationStructuresKHR(device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos); } #endif #if (defined(VK_KHR_acceleration_structure)) - VkDeviceAddress getAccelerationStructureDeviceAddressKHR(const VkAccelerationStructureDeviceAddressInfoKHR* pInfo) const noexcept { + VkDeviceAddress + getAccelerationStructureDeviceAddressKHR(const VkAccelerationStructureDeviceAddressInfoKHR* pInfo) const noexcept + { return fp_vkGetAccelerationStructureDeviceAddressKHR(device, pInfo); } #endif #if (defined(VK_KHR_deferred_host_operations)) - VkResult createDeferredOperationKHR(const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation) const noexcept { + VkResult createDeferredOperationKHR(const VkAllocationCallbacks* pAllocator, + VkDeferredOperationKHR* pDeferredOperation) const noexcept + { return fp_vkCreateDeferredOperationKHR(device, pAllocator, pDeferredOperation); } #endif #if (defined(VK_KHR_deferred_host_operations)) - void destroyDeferredOperationKHR(VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyDeferredOperationKHR(VkDeferredOperationKHR operation, + const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyDeferredOperationKHR(device, operation, pAllocator); } #endif #if (defined(VK_KHR_deferred_host_operations)) - uint32_t getDeferredOperationMaxConcurrencyKHR(VkDeferredOperationKHR operation) const noexcept { + uint32_t getDeferredOperationMaxConcurrencyKHR(VkDeferredOperationKHR operation) const noexcept + { return fp_vkGetDeferredOperationMaxConcurrencyKHR(device, operation); } #endif #if (defined(VK_KHR_deferred_host_operations)) - VkResult getDeferredOperationResultKHR(VkDeferredOperationKHR operation) const noexcept { + VkResult getDeferredOperationResultKHR(VkDeferredOperationKHR operation) const noexcept + { return fp_vkGetDeferredOperationResultKHR(device, operation); } #endif #if (defined(VK_KHR_deferred_host_operations)) - VkResult deferredOperationJoinKHR(VkDeferredOperationKHR operation) const noexcept { + VkResult deferredOperationJoinKHR(VkDeferredOperationKHR operation) const noexcept + { return fp_vkDeferredOperationJoinKHR(device, operation); } #endif #if (defined(VK_NV_device_generated_commands_compute)) - void getPipelineIndirectMemoryRequirementsNV(const VkComputePipelineCreateInfo* pCreateInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + void getPipelineIndirectMemoryRequirementsNV(const VkComputePipelineCreateInfo* pCreateInfo, + VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept + { fp_vkGetPipelineIndirectMemoryRequirementsNV(device, pCreateInfo, pMemoryRequirements); } #endif #if (defined(VK_NV_device_generated_commands_compute)) - VkDeviceAddress getPipelineIndirectDeviceAddressNV(const VkPipelineIndirectDeviceAddressInfoNV* pInfo) const noexcept { + VkDeviceAddress + getPipelineIndirectDeviceAddressNV(const VkPipelineIndirectDeviceAddressInfoNV* pInfo) const noexcept + { return fp_vkGetPipelineIndirectDeviceAddressNV(device, pInfo); } #endif #if (defined(VK_AMD_anti_lag)) - void antiLagUpdateAMD(const VkAntiLagDataAMD* pData) const noexcept { - fp_vkAntiLagUpdateAMD(device, pData); - } + void antiLagUpdateAMD(const VkAntiLagDataAMD* pData) const noexcept { fp_vkAntiLagUpdateAMD(device, 
pData); } #endif #if (defined(VK_VERSION_1_3)) - void cmdSetCullMode(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode) const noexcept { + void cmdSetCullMode(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode) const noexcept + { fp_vkCmdSetCullMode(commandBuffer, cullMode); } #endif #if (defined(VK_VERSION_1_3)) - void cmdSetFrontFace(VkCommandBuffer commandBuffer, VkFrontFace frontFace) const noexcept { + void cmdSetFrontFace(VkCommandBuffer commandBuffer, VkFrontFace frontFace) const noexcept + { fp_vkCmdSetFrontFace(commandBuffer, frontFace); } #endif #if (defined(VK_VERSION_1_3)) - void cmdSetPrimitiveTopology(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology) const noexcept { + void cmdSetPrimitiveTopology(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology) const noexcept + { fp_vkCmdSetPrimitiveTopology(commandBuffer, primitiveTopology); } #endif #if (defined(VK_VERSION_1_3)) - void cmdSetViewportWithCount(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports) const noexcept { + void cmdSetViewportWithCount(VkCommandBuffer commandBuffer, + uint32_t viewportCount, + const VkViewport* pViewports) const noexcept + { fp_vkCmdSetViewportWithCount(commandBuffer, viewportCount, pViewports); } #endif #if (defined(VK_VERSION_1_3)) - void cmdSetScissorWithCount(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors) const noexcept { + void cmdSetScissorWithCount(VkCommandBuffer commandBuffer, + uint32_t scissorCount, + const VkRect2D* pScissors) const noexcept + { fp_vkCmdSetScissorWithCount(commandBuffer, scissorCount, pScissors); } #endif #if (defined(VK_KHR_maintenance5)) - void cmdBindIndexBuffer2KHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType) const noexcept { + void cmdBindIndexBuffer2KHR(VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkDeviceSize size, + VkIndexType indexType) const noexcept + { fp_vkCmdBindIndexBuffer2KHR(commandBuffer, buffer, offset, size, indexType); } #endif #if (defined(VK_VERSION_1_3)) - void cmdBindVertexBuffers2(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides) const noexcept { + void cmdBindVertexBuffers2(VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets, + const VkDeviceSize* pSizes, + const VkDeviceSize* pStrides) const noexcept + { fp_vkCmdBindVertexBuffers2(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides); } #endif #if (defined(VK_VERSION_1_3)) - void cmdSetDepthTestEnable(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable) const noexcept { + void cmdSetDepthTestEnable(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable) const noexcept + { fp_vkCmdSetDepthTestEnable(commandBuffer, depthTestEnable); } #endif #if (defined(VK_VERSION_1_3)) - void cmdSetDepthWriteEnable(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable) const noexcept { + void cmdSetDepthWriteEnable(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable) const noexcept + { fp_vkCmdSetDepthWriteEnable(commandBuffer, depthWriteEnable); } #endif #if (defined(VK_VERSION_1_3)) - void cmdSetDepthCompareOp(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp) const noexcept { + void cmdSetDepthCompareOp(VkCommandBuffer 
commandBuffer, VkCompareOp depthCompareOp) const noexcept + { fp_vkCmdSetDepthCompareOp(commandBuffer, depthCompareOp); } #endif #if (defined(VK_VERSION_1_3)) - void cmdSetDepthBoundsTestEnable(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable) const noexcept { + void cmdSetDepthBoundsTestEnable(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable) const noexcept + { fp_vkCmdSetDepthBoundsTestEnable(commandBuffer, depthBoundsTestEnable); } #endif #if (defined(VK_VERSION_1_3)) - void cmdSetStencilTestEnable(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable) const noexcept { + void cmdSetStencilTestEnable(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable) const noexcept + { fp_vkCmdSetStencilTestEnable(commandBuffer, stencilTestEnable); } #endif #if (defined(VK_VERSION_1_3)) - void cmdSetStencilOp(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp) const noexcept { + void cmdSetStencilOp(VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp) const noexcept + { fp_vkCmdSetStencilOp(commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp); } #endif #if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) - void cmdSetPatchControlPointsEXT(VkCommandBuffer commandBuffer, uint32_t patchControlPoints) const noexcept { + void cmdSetPatchControlPointsEXT(VkCommandBuffer commandBuffer, uint32_t patchControlPoints) const noexcept + { fp_vkCmdSetPatchControlPointsEXT(commandBuffer, patchControlPoints); } #endif #if (defined(VK_VERSION_1_3)) - void cmdSetRasterizerDiscardEnable(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable) const noexcept { + void cmdSetRasterizerDiscardEnable(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable) const noexcept + { fp_vkCmdSetRasterizerDiscardEnable(commandBuffer, rasterizerDiscardEnable); } #endif #if (defined(VK_VERSION_1_3)) - void cmdSetDepthBiasEnable(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable) const noexcept { + void cmdSetDepthBiasEnable(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable) const noexcept + { fp_vkCmdSetDepthBiasEnable(commandBuffer, depthBiasEnable); } #endif #if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) - void cmdSetLogicOpEXT(VkCommandBuffer commandBuffer, VkLogicOp logicOp) const noexcept { + void cmdSetLogicOpEXT(VkCommandBuffer commandBuffer, VkLogicOp logicOp) const noexcept + { fp_vkCmdSetLogicOpEXT(commandBuffer, logicOp); } #endif #if (defined(VK_VERSION_1_3)) - void cmdSetPrimitiveRestartEnable(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable) const noexcept { + void cmdSetPrimitiveRestartEnable(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable) const noexcept + { fp_vkCmdSetPrimitiveRestartEnable(commandBuffer, primitiveRestartEnable); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetTessellationDomainOriginEXT(VkCommandBuffer commandBuffer, VkTessellationDomainOriginKHR domainOrigin) const noexcept { + void cmdSetTessellationDomainOriginEXT(VkCommandBuffer commandBuffer, + VkTessellationDomainOriginKHR domainOrigin) const noexcept + { fp_vkCmdSetTessellationDomainOriginEXT(commandBuffer, domainOrigin); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void 
cmdSetDepthClampEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthClampEnable) const noexcept { + void cmdSetDepthClampEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthClampEnable) const noexcept + { fp_vkCmdSetDepthClampEnableEXT(commandBuffer, depthClampEnable); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetPolygonModeEXT(VkCommandBuffer commandBuffer, VkPolygonMode polygonMode) const noexcept { + void cmdSetPolygonModeEXT(VkCommandBuffer commandBuffer, VkPolygonMode polygonMode) const noexcept + { fp_vkCmdSetPolygonModeEXT(commandBuffer, polygonMode); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetRasterizationSamplesEXT(VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples) const noexcept { + void cmdSetRasterizationSamplesEXT(VkCommandBuffer commandBuffer, + VkSampleCountFlagBits rasterizationSamples) const noexcept + { fp_vkCmdSetRasterizationSamplesEXT(commandBuffer, rasterizationSamples); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetSampleMaskEXT(VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask* pSampleMask) const noexcept { + void cmdSetSampleMaskEXT(VkCommandBuffer commandBuffer, + VkSampleCountFlagBits samples, + const VkSampleMask* pSampleMask) const noexcept + { fp_vkCmdSetSampleMaskEXT(commandBuffer, samples, pSampleMask); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetAlphaToCoverageEnableEXT(VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable) const noexcept { + void cmdSetAlphaToCoverageEnableEXT(VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable) const noexcept + { fp_vkCmdSetAlphaToCoverageEnableEXT(commandBuffer, alphaToCoverageEnable); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetAlphaToOneEnableEXT(VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable) const noexcept { + void cmdSetAlphaToOneEnableEXT(VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable) const noexcept + { fp_vkCmdSetAlphaToOneEnableEXT(commandBuffer, alphaToOneEnable); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetLogicOpEnableEXT(VkCommandBuffer commandBuffer, VkBool32 logicOpEnable) const noexcept { + void cmdSetLogicOpEnableEXT(VkCommandBuffer commandBuffer, VkBool32 logicOpEnable) const noexcept + { fp_vkCmdSetLogicOpEnableEXT(commandBuffer, logicOpEnable); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetColorBlendEnableEXT(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkBool32* pColorBlendEnables) const noexcept { + void cmdSetColorBlendEnableEXT(VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkBool32* pColorBlendEnables) const noexcept + { fp_vkCmdSetColorBlendEnableEXT(commandBuffer, firstAttachment, attachmentCount, pColorBlendEnables); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetColorBlendEquationEXT(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendEquationEXT* pColorBlendEquations) const noexcept { + void cmdSetColorBlendEquationEXT(VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t 
attachmentCount, + const VkColorBlendEquationEXT* pColorBlendEquations) const noexcept + { fp_vkCmdSetColorBlendEquationEXT(commandBuffer, firstAttachment, attachmentCount, pColorBlendEquations); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetColorWriteMaskEXT(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorComponentFlags* pColorWriteMasks) const noexcept { + void cmdSetColorWriteMaskEXT(VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorComponentFlags* pColorWriteMasks) const noexcept + { fp_vkCmdSetColorWriteMaskEXT(commandBuffer, firstAttachment, attachmentCount, pColorWriteMasks); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetRasterizationStreamEXT(VkCommandBuffer commandBuffer, uint32_t rasterizationStream) const noexcept { + void cmdSetRasterizationStreamEXT(VkCommandBuffer commandBuffer, uint32_t rasterizationStream) const noexcept + { fp_vkCmdSetRasterizationStreamEXT(commandBuffer, rasterizationStream); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetConservativeRasterizationModeEXT(VkCommandBuffer commandBuffer, VkConservativeRasterizationModeEXT conservativeRasterizationMode) const noexcept { + void cmdSetConservativeRasterizationModeEXT( + VkCommandBuffer commandBuffer, VkConservativeRasterizationModeEXT conservativeRasterizationMode) const noexcept + { fp_vkCmdSetConservativeRasterizationModeEXT(commandBuffer, conservativeRasterizationMode); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetExtraPrimitiveOverestimationSizeEXT(VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize) const noexcept { + void cmdSetExtraPrimitiveOverestimationSizeEXT(VkCommandBuffer commandBuffer, + float extraPrimitiveOverestimationSize) const noexcept + { fp_vkCmdSetExtraPrimitiveOverestimationSizeEXT(commandBuffer, extraPrimitiveOverestimationSize); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetDepthClipEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthClipEnable) const noexcept { + void cmdSetDepthClipEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthClipEnable) const noexcept + { fp_vkCmdSetDepthClipEnableEXT(commandBuffer, depthClipEnable); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetSampleLocationsEnableEXT(VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable) const noexcept { + void cmdSetSampleLocationsEnableEXT(VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable) const noexcept + { fp_vkCmdSetSampleLocationsEnableEXT(commandBuffer, sampleLocationsEnable); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetColorBlendAdvancedEXT(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendAdvancedEXT* pColorBlendAdvanced) const noexcept { + void cmdSetColorBlendAdvancedEXT(VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorBlendAdvancedEXT* pColorBlendAdvanced) const noexcept + { fp_vkCmdSetColorBlendAdvancedEXT(commandBuffer, firstAttachment, attachmentCount, pColorBlendAdvanced); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void 
cmdSetProvokingVertexModeEXT(VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode) const noexcept { + void cmdSetProvokingVertexModeEXT(VkCommandBuffer commandBuffer, + VkProvokingVertexModeEXT provokingVertexMode) const noexcept + { fp_vkCmdSetProvokingVertexModeEXT(commandBuffer, provokingVertexMode); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetLineRasterizationModeEXT(VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode) const noexcept { + void cmdSetLineRasterizationModeEXT(VkCommandBuffer commandBuffer, + VkLineRasterizationModeEXT lineRasterizationMode) const noexcept + { fp_vkCmdSetLineRasterizationModeEXT(commandBuffer, lineRasterizationMode); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetLineStippleEnableEXT(VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable) const noexcept { + void cmdSetLineStippleEnableEXT(VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable) const noexcept + { fp_vkCmdSetLineStippleEnableEXT(commandBuffer, stippledLineEnable); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetDepthClipNegativeOneToOneEXT(VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne) const noexcept { + void cmdSetDepthClipNegativeOneToOneEXT(VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne) const noexcept + { fp_vkCmdSetDepthClipNegativeOneToOneEXT(commandBuffer, negativeOneToOne); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetViewportWScalingEnableNV(VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable) const noexcept { + void cmdSetViewportWScalingEnableNV(VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable) const noexcept + { fp_vkCmdSetViewportWScalingEnableNV(commandBuffer, viewportWScalingEnable); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetViewportSwizzleNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportSwizzleNV* pViewportSwizzles) const noexcept { + void cmdSetViewportSwizzleNV(VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportSwizzleNV* pViewportSwizzles) const noexcept + { fp_vkCmdSetViewportSwizzleNV(commandBuffer, firstViewport, viewportCount, pViewportSwizzles); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetCoverageToColorEnableNV(VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable) const noexcept { + void cmdSetCoverageToColorEnableNV(VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable) const noexcept + { fp_vkCmdSetCoverageToColorEnableNV(commandBuffer, coverageToColorEnable); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetCoverageToColorLocationNV(VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation) const noexcept { + void cmdSetCoverageToColorLocationNV(VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation) const noexcept + { fp_vkCmdSetCoverageToColorLocationNV(commandBuffer, coverageToColorLocation); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetCoverageModulationModeNV(VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode) const noexcept { + void 
cmdSetCoverageModulationModeNV(VkCommandBuffer commandBuffer, + VkCoverageModulationModeNV coverageModulationMode) const noexcept + { fp_vkCmdSetCoverageModulationModeNV(commandBuffer, coverageModulationMode); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetCoverageModulationTableEnableNV(VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable) const noexcept { + void cmdSetCoverageModulationTableEnableNV(VkCommandBuffer commandBuffer, + VkBool32 coverageModulationTableEnable) const noexcept + { fp_vkCmdSetCoverageModulationTableEnableNV(commandBuffer, coverageModulationTableEnable); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetCoverageModulationTableNV(VkCommandBuffer commandBuffer, uint32_t coverageModulationTableCount, const float* pCoverageModulationTable) const noexcept { + void cmdSetCoverageModulationTableNV(VkCommandBuffer commandBuffer, + uint32_t coverageModulationTableCount, + const float* pCoverageModulationTable) const noexcept + { fp_vkCmdSetCoverageModulationTableNV(commandBuffer, coverageModulationTableCount, pCoverageModulationTable); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetShadingRateImageEnableNV(VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable) const noexcept { + void cmdSetShadingRateImageEnableNV(VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable) const noexcept + { fp_vkCmdSetShadingRateImageEnableNV(commandBuffer, shadingRateImageEnable); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetCoverageReductionModeNV(VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode) const noexcept { + void cmdSetCoverageReductionModeNV(VkCommandBuffer commandBuffer, + VkCoverageReductionModeNV coverageReductionMode) const noexcept + { fp_vkCmdSetCoverageReductionModeNV(commandBuffer, coverageReductionMode); } #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - void cmdSetRepresentativeFragmentTestEnableNV(VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable) const noexcept { + void cmdSetRepresentativeFragmentTestEnableNV(VkCommandBuffer commandBuffer, + VkBool32 representativeFragmentTestEnable) const noexcept + { fp_vkCmdSetRepresentativeFragmentTestEnableNV(commandBuffer, representativeFragmentTestEnable); } #endif #if (defined(VK_VERSION_1_3)) - VkResult createPrivateDataSlot(const VkPrivateDataSlotCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlotEXT* pPrivateDataSlot) const noexcept { + VkResult createPrivateDataSlot(const VkPrivateDataSlotCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPrivateDataSlotEXT* pPrivateDataSlot) const noexcept + { return fp_vkCreatePrivateDataSlot(device, pCreateInfo, pAllocator, pPrivateDataSlot); } #endif #if (defined(VK_VERSION_1_3)) - void destroyPrivateDataSlot(VkPrivateDataSlotEXT privateDataSlot, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyPrivateDataSlot(VkPrivateDataSlotEXT privateDataSlot, + const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyPrivateDataSlot(device, privateDataSlot, pAllocator); } #endif #if (defined(VK_VERSION_1_3)) - VkResult setPrivateData(VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t data) const noexcept { + VkResult 
setPrivateData(VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlotEXT privateDataSlot, + uint64_t data) const noexcept + { return fp_vkSetPrivateData(device, objectType, objectHandle, privateDataSlot, data); } #endif #if (defined(VK_VERSION_1_3)) - void getPrivateData(VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t* pData) const noexcept { + void getPrivateData(VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlotEXT privateDataSlot, + uint64_t* pData) const noexcept + { fp_vkGetPrivateData(device, objectType, objectHandle, privateDataSlot, pData); } #endif #if (defined(VK_VERSION_1_3)) - void cmdCopyBuffer2(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR* pCopyBufferInfo) const noexcept { + void cmdCopyBuffer2(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR* pCopyBufferInfo) const noexcept + { fp_vkCmdCopyBuffer2(commandBuffer, pCopyBufferInfo); } #endif #if (defined(VK_VERSION_1_3)) - void cmdCopyImage2(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR* pCopyImageInfo) const noexcept { + void cmdCopyImage2(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR* pCopyImageInfo) const noexcept + { fp_vkCmdCopyImage2(commandBuffer, pCopyImageInfo); } #endif #if (defined(VK_VERSION_1_3)) - void cmdBlitImage2(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR* pBlitImageInfo) const noexcept { + void cmdBlitImage2(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR* pBlitImageInfo) const noexcept + { fp_vkCmdBlitImage2(commandBuffer, pBlitImageInfo); } #endif #if (defined(VK_VERSION_1_3)) - void cmdCopyBufferToImage2(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo) const noexcept { + void cmdCopyBufferToImage2(VkCommandBuffer commandBuffer, + const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo) const noexcept + { fp_vkCmdCopyBufferToImage2(commandBuffer, pCopyBufferToImageInfo); } #endif #if (defined(VK_VERSION_1_3)) - void cmdCopyImageToBuffer2(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo) const noexcept { + void cmdCopyImageToBuffer2(VkCommandBuffer commandBuffer, + const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo) const noexcept + { fp_vkCmdCopyImageToBuffer2(commandBuffer, pCopyImageToBufferInfo); } #endif #if (defined(VK_VERSION_1_3)) - void cmdResolveImage2(VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR* pResolveImageInfo) const noexcept { + void cmdResolveImage2(VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR* pResolveImageInfo) const noexcept + { fp_vkCmdResolveImage2(commandBuffer, pResolveImageInfo); } #endif #if (defined(VK_KHR_object_refresh)) - void cmdRefreshObjectsKHR(VkCommandBuffer commandBuffer, const VkRefreshObjectListKHR* pRefreshObjects) const noexcept { + void cmdRefreshObjectsKHR(VkCommandBuffer commandBuffer, + const VkRefreshObjectListKHR* pRefreshObjects) const noexcept + { fp_vkCmdRefreshObjectsKHR(commandBuffer, pRefreshObjects); } #endif #if (defined(VK_KHR_fragment_shading_rate)) - void cmdSetFragmentShadingRateKHR(VkCommandBuffer commandBuffer, const VkExtent2D* pFragmentSize, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]) const noexcept { + void cmdSetFragmentShadingRateKHR(VkCommandBuffer commandBuffer, + const VkExtent2D* pFragmentSize, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2]) const noexcept + { fp_vkCmdSetFragmentShadingRateKHR(commandBuffer, pFragmentSize, combinerOps); } #endif #if 
(defined(VK_NV_fragment_shading_rate_enums)) - void cmdSetFragmentShadingRateEnumNV(VkCommandBuffer commandBuffer, VkFragmentShadingRateNV shadingRate, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]) const noexcept { + void cmdSetFragmentShadingRateEnumNV(VkCommandBuffer commandBuffer, + VkFragmentShadingRateNV shadingRate, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2]) const noexcept + { fp_vkCmdSetFragmentShadingRateEnumNV(commandBuffer, shadingRate, combinerOps); } #endif #if (defined(VK_KHR_acceleration_structure)) - void getAccelerationStructureBuildSizesKHR(VkAccelerationStructureBuildTypeKHR buildType, const VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo, const uint32_t* pMaxPrimitiveCounts, VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo) const noexcept { + void getAccelerationStructureBuildSizesKHR(VkAccelerationStructureBuildTypeKHR buildType, + const VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo, + const uint32_t* pMaxPrimitiveCounts, + VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo) const noexcept + { fp_vkGetAccelerationStructureBuildSizesKHR(device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo); } #endif #if (defined(VK_EXT_vertex_input_dynamic_state)) || (defined(VK_EXT_shader_object)) - void cmdSetVertexInputEXT(VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount, const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount, const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions) const noexcept { - fp_vkCmdSetVertexInputEXT(commandBuffer, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions); + void cmdSetVertexInputEXT(VkCommandBuffer commandBuffer, + uint32_t vertexBindingDescriptionCount, + const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions) const noexcept + { + fp_vkCmdSetVertexInputEXT(commandBuffer, + vertexBindingDescriptionCount, + pVertexBindingDescriptions, + vertexAttributeDescriptionCount, + pVertexAttributeDescriptions); } #endif #if (defined(VK_EXT_color_write_enable)) - void cmdSetColorWriteEnableEXT(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32* pColorWriteEnables) const noexcept { + void cmdSetColorWriteEnableEXT(VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkBool32* pColorWriteEnables) const noexcept + { fp_vkCmdSetColorWriteEnableEXT(commandBuffer, attachmentCount, pColorWriteEnables); } #endif #if (defined(VK_VERSION_1_3)) - void cmdSetEvent2(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfoKHR* pDependencyInfo) const noexcept { + void cmdSetEvent2(VkCommandBuffer commandBuffer, + VkEvent event, + const VkDependencyInfoKHR* pDependencyInfo) const noexcept + { fp_vkCmdSetEvent2(commandBuffer, event, pDependencyInfo); } #endif #if (defined(VK_VERSION_1_3)) - void cmdResetEvent2(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2KHR stageMask) const noexcept { + void cmdResetEvent2(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2KHR stageMask) const noexcept + { fp_vkCmdResetEvent2(commandBuffer, event, stageMask); } #endif #if (defined(VK_VERSION_1_3)) - void cmdWaitEvents2(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfoKHR* pDependencyInfos) const noexcept { 
+ void cmdWaitEvents2(VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent* pEvents, + const VkDependencyInfoKHR* pDependencyInfos) const noexcept + { fp_vkCmdWaitEvents2(commandBuffer, eventCount, pEvents, pDependencyInfos); } #endif #if (defined(VK_VERSION_1_3)) - void cmdPipelineBarrier2(VkCommandBuffer commandBuffer, const VkDependencyInfoKHR* pDependencyInfo) const noexcept { + void cmdPipelineBarrier2(VkCommandBuffer commandBuffer, const VkDependencyInfoKHR* pDependencyInfo) const noexcept + { fp_vkCmdPipelineBarrier2(commandBuffer, pDependencyInfo); } #endif #if (defined(VK_VERSION_1_3)) - VkResult queueSubmit2(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR* pSubmits, VkFence fence) const noexcept { + VkResult + queueSubmit2(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR* pSubmits, VkFence fence) const noexcept + { return fp_vkQueueSubmit2(queue, submitCount, pSubmits, fence); } #endif #if (defined(VK_VERSION_1_3)) - void cmdWriteTimestamp2(VkCommandBuffer commandBuffer, VkPipelineStageFlags2KHR stage, VkQueryPool queryPool, uint32_t query) const noexcept { + void cmdWriteTimestamp2(VkCommandBuffer commandBuffer, + VkPipelineStageFlags2KHR stage, + VkQueryPool queryPool, + uint32_t query) const noexcept + { fp_vkCmdWriteTimestamp2(commandBuffer, stage, queryPool, query); } #endif #if (defined(VK_KHR_synchronization2)) - void cmdWriteBufferMarker2AMD(VkCommandBuffer commandBuffer, VkPipelineStageFlags2KHR stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) const noexcept { + void cmdWriteBufferMarker2AMD(VkCommandBuffer commandBuffer, + VkPipelineStageFlags2KHR stage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker) const noexcept + { fp_vkCmdWriteBufferMarker2AMD(commandBuffer, stage, dstBuffer, dstOffset, marker); } #endif #if (defined(VK_KHR_synchronization2)) - void getQueueCheckpointData2NV(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointData2NV* pCheckpointData) const noexcept { + void getQueueCheckpointData2NV(VkQueue queue, + uint32_t* pCheckpointDataCount, + VkCheckpointData2NV* pCheckpointData) const noexcept + { fp_vkGetQueueCheckpointData2NV(queue, pCheckpointDataCount, pCheckpointData); } #endif #if (defined(VK_EXT_host_image_copy)) - VkResult copyMemoryToImageEXT(const VkCopyMemoryToImageInfoEXT* pCopyMemoryToImageInfo) const noexcept { + VkResult copyMemoryToImageEXT(const VkCopyMemoryToImageInfoEXT* pCopyMemoryToImageInfo) const noexcept + { return fp_vkCopyMemoryToImageEXT(device, pCopyMemoryToImageInfo); } #endif #if (defined(VK_EXT_host_image_copy)) - VkResult copyImageToMemoryEXT(const VkCopyImageToMemoryInfoEXT* pCopyImageToMemoryInfo) const noexcept { + VkResult copyImageToMemoryEXT(const VkCopyImageToMemoryInfoEXT* pCopyImageToMemoryInfo) const noexcept + { return fp_vkCopyImageToMemoryEXT(device, pCopyImageToMemoryInfo); } #endif #if (defined(VK_EXT_host_image_copy)) - VkResult copyImageToImageEXT(const VkCopyImageToImageInfoEXT* pCopyImageToImageInfo) const noexcept { + VkResult copyImageToImageEXT(const VkCopyImageToImageInfoEXT* pCopyImageToImageInfo) const noexcept + { return fp_vkCopyImageToImageEXT(device, pCopyImageToImageInfo); } #endif #if (defined(VK_EXT_host_image_copy)) - VkResult transitionImageLayoutEXT(uint32_t transitionCount, const VkHostImageLayoutTransitionInfoEXT* pTransitions) const noexcept { + VkResult transitionImageLayoutEXT(uint32_t transitionCount, + const VkHostImageLayoutTransitionInfoEXT* pTransitions) const noexcept + { return 
fp_vkTransitionImageLayoutEXT(device, transitionCount, pTransitions); } #endif #if (defined(VKSC_VERSION_1_0)) - void getCommandPoolMemoryConsumption(VkCommandPool commandPool, VkCommandBuffer commandBuffer, VkCommandPoolMemoryConsumption* pConsumption) const noexcept { + void getCommandPoolMemoryConsumption(VkCommandPool commandPool, + VkCommandBuffer commandBuffer, + VkCommandPoolMemoryConsumption* pConsumption) const noexcept + { fp_vkGetCommandPoolMemoryConsumption(device, commandPool, commandBuffer, pConsumption); } #endif #if (defined(VK_KHR_video_queue)) - VkResult createVideoSessionKHR(const VkVideoSessionCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionKHR* pVideoSession) const noexcept { + VkResult createVideoSessionKHR(const VkVideoSessionCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkVideoSessionKHR* pVideoSession) const noexcept + { return fp_vkCreateVideoSessionKHR(device, pCreateInfo, pAllocator, pVideoSession); } #endif #if (defined(VK_KHR_video_queue)) - void destroyVideoSessionKHR(VkVideoSessionKHR videoSession, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyVideoSessionKHR(VkVideoSessionKHR videoSession, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyVideoSessionKHR(device, videoSession, pAllocator); } #endif #if (defined(VK_KHR_video_queue)) - VkResult createVideoSessionParametersKHR(const VkVideoSessionParametersCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionParametersKHR* pVideoSessionParameters) const noexcept { + VkResult createVideoSessionParametersKHR(const VkVideoSessionParametersCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkVideoSessionParametersKHR* pVideoSessionParameters) const noexcept + { return fp_vkCreateVideoSessionParametersKHR(device, pCreateInfo, pAllocator, pVideoSessionParameters); } #endif #if (defined(VK_KHR_video_queue)) - VkResult updateVideoSessionParametersKHR(VkVideoSessionParametersKHR videoSessionParameters, const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo) const noexcept { + VkResult updateVideoSessionParametersKHR(VkVideoSessionParametersKHR videoSessionParameters, + const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo) const noexcept + { return fp_vkUpdateVideoSessionParametersKHR(device, videoSessionParameters, pUpdateInfo); } #endif #if (defined(VK_KHR_video_encode_queue)) - VkResult getEncodedVideoSessionParametersKHR(const VkVideoEncodeSessionParametersGetInfoKHR* pVideoSessionParametersInfo, VkVideoEncodeSessionParametersFeedbackInfoKHR* pFeedbackInfo, size_t* pDataSize, void* pData) const noexcept { - return fp_vkGetEncodedVideoSessionParametersKHR(device, pVideoSessionParametersInfo, pFeedbackInfo, pDataSize, pData); + VkResult + getEncodedVideoSessionParametersKHR(const VkVideoEncodeSessionParametersGetInfoKHR* pVideoSessionParametersInfo, + VkVideoEncodeSessionParametersFeedbackInfoKHR* pFeedbackInfo, + size_t* pDataSize, + void* pData) const noexcept + { + return fp_vkGetEncodedVideoSessionParametersKHR( + device, pVideoSessionParametersInfo, pFeedbackInfo, pDataSize, pData); } #endif #if (defined(VK_KHR_video_queue)) - void destroyVideoSessionParametersKHR(VkVideoSessionParametersKHR videoSessionParameters, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyVideoSessionParametersKHR(VkVideoSessionParametersKHR videoSessionParameters, + const VkAllocationCallbacks* pAllocator) const noexcept + { 
fp_vkDestroyVideoSessionParametersKHR(device, videoSessionParameters, pAllocator); } #endif #if (defined(VK_KHR_video_queue)) - VkResult getVideoSessionMemoryRequirementsKHR(VkVideoSessionKHR videoSession, uint32_t* pMemoryRequirementsCount, VkVideoSessionMemoryRequirementsKHR* pMemoryRequirements) const noexcept { - return fp_vkGetVideoSessionMemoryRequirementsKHR(device, videoSession, pMemoryRequirementsCount, pMemoryRequirements); + VkResult + getVideoSessionMemoryRequirementsKHR(VkVideoSessionKHR videoSession, + uint32_t* pMemoryRequirementsCount, + VkVideoSessionMemoryRequirementsKHR* pMemoryRequirements) const noexcept + { + return fp_vkGetVideoSessionMemoryRequirementsKHR( + device, videoSession, pMemoryRequirementsCount, pMemoryRequirements); } #endif #if (defined(VK_KHR_video_queue)) - VkResult bindVideoSessionMemoryKHR(VkVideoSessionKHR videoSession, uint32_t bindSessionMemoryInfoCount, const VkBindVideoSessionMemoryInfoKHR* pBindSessionMemoryInfos) const noexcept { - return fp_vkBindVideoSessionMemoryKHR(device, videoSession, bindSessionMemoryInfoCount, pBindSessionMemoryInfos); + VkResult bindVideoSessionMemoryKHR(VkVideoSessionKHR videoSession, + uint32_t bindSessionMemoryInfoCount, + const VkBindVideoSessionMemoryInfoKHR* pBindSessionMemoryInfos) const noexcept + { + return fp_vkBindVideoSessionMemoryKHR( + device, videoSession, bindSessionMemoryInfoCount, pBindSessionMemoryInfos); } #endif #if (defined(VK_KHR_video_decode_queue)) - void cmdDecodeVideoKHR(VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR* pDecodeInfo) const noexcept { + void cmdDecodeVideoKHR(VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR* pDecodeInfo) const noexcept + { fp_vkCmdDecodeVideoKHR(commandBuffer, pDecodeInfo); } #endif #if (defined(VK_KHR_video_queue)) - void cmdBeginVideoCodingKHR(VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR* pBeginInfo) const noexcept { + void cmdBeginVideoCodingKHR(VkCommandBuffer commandBuffer, + const VkVideoBeginCodingInfoKHR* pBeginInfo) const noexcept + { fp_vkCmdBeginVideoCodingKHR(commandBuffer, pBeginInfo); } #endif #if (defined(VK_KHR_video_queue)) - void cmdControlVideoCodingKHR(VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR* pCodingControlInfo) const noexcept { + void cmdControlVideoCodingKHR(VkCommandBuffer commandBuffer, + const VkVideoCodingControlInfoKHR* pCodingControlInfo) const noexcept + { fp_vkCmdControlVideoCodingKHR(commandBuffer, pCodingControlInfo); } #endif #if (defined(VK_KHR_video_queue)) - void cmdEndVideoCodingKHR(VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR* pEndCodingInfo) const noexcept { + void cmdEndVideoCodingKHR(VkCommandBuffer commandBuffer, + const VkVideoEndCodingInfoKHR* pEndCodingInfo) const noexcept + { fp_vkCmdEndVideoCodingKHR(commandBuffer, pEndCodingInfo); } #endif #if (defined(VK_KHR_video_encode_queue)) - void cmdEncodeVideoKHR(VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR* pEncodeInfo) const noexcept { + void cmdEncodeVideoKHR(VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR* pEncodeInfo) const noexcept + { fp_vkCmdEncodeVideoKHR(commandBuffer, pEncodeInfo); } #endif #if (defined(VK_NV_memory_decompression)) - void cmdDecompressMemoryNV(VkCommandBuffer commandBuffer, uint32_t decompressRegionCount, const VkDecompressMemoryRegionNV* pDecompressMemoryRegions) const noexcept { + void cmdDecompressMemoryNV(VkCommandBuffer commandBuffer, + uint32_t decompressRegionCount, + const VkDecompressMemoryRegionNV* pDecompressMemoryRegions) const 
noexcept + { fp_vkCmdDecompressMemoryNV(commandBuffer, decompressRegionCount, pDecompressMemoryRegions); } #endif #if (defined(VK_NV_memory_decompression)) - void cmdDecompressMemoryIndirectCountNV(VkCommandBuffer commandBuffer, VkDeviceAddress indirectCommandsAddress, VkDeviceAddress indirectCommandsCountAddress, uint32_t stride) const noexcept { - fp_vkCmdDecompressMemoryIndirectCountNV(commandBuffer, indirectCommandsAddress, indirectCommandsCountAddress, stride); + void cmdDecompressMemoryIndirectCountNV(VkCommandBuffer commandBuffer, + VkDeviceAddress indirectCommandsAddress, + VkDeviceAddress indirectCommandsCountAddress, + uint32_t stride) const noexcept + { + fp_vkCmdDecompressMemoryIndirectCountNV( + commandBuffer, indirectCommandsAddress, indirectCommandsCountAddress, stride); } #endif #if (defined(VK_EXT_descriptor_buffer)) - void getDescriptorSetLayoutSizeEXT(VkDescriptorSetLayout layout, VkDeviceSize* pLayoutSizeInBytes) const noexcept { + void getDescriptorSetLayoutSizeEXT(VkDescriptorSetLayout layout, VkDeviceSize* pLayoutSizeInBytes) const noexcept + { fp_vkGetDescriptorSetLayoutSizeEXT(device, layout, pLayoutSizeInBytes); } #endif #if (defined(VK_EXT_descriptor_buffer)) - void getDescriptorSetLayoutBindingOffsetEXT(VkDescriptorSetLayout layout, uint32_t binding, VkDeviceSize* pOffset) const noexcept { + void getDescriptorSetLayoutBindingOffsetEXT(VkDescriptorSetLayout layout, + uint32_t binding, + VkDeviceSize* pOffset) const noexcept + { fp_vkGetDescriptorSetLayoutBindingOffsetEXT(device, layout, binding, pOffset); } #endif #if (defined(VK_EXT_descriptor_buffer)) - void getDescriptorEXT(const VkDescriptorGetInfoEXT* pDescriptorInfo, size_t dataSize, void* pDescriptor) const noexcept { + void + getDescriptorEXT(const VkDescriptorGetInfoEXT* pDescriptorInfo, size_t dataSize, void* pDescriptor) const noexcept + { fp_vkGetDescriptorEXT(device, pDescriptorInfo, dataSize, pDescriptor); } #endif #if (defined(VK_EXT_descriptor_buffer)) - void cmdBindDescriptorBuffersEXT(VkCommandBuffer commandBuffer, uint32_t bufferCount, const VkDescriptorBufferBindingInfoEXT* pBindingInfos) const noexcept { + void cmdBindDescriptorBuffersEXT(VkCommandBuffer commandBuffer, + uint32_t bufferCount, + const VkDescriptorBufferBindingInfoEXT* pBindingInfos) const noexcept + { fp_vkCmdBindDescriptorBuffersEXT(commandBuffer, bufferCount, pBindingInfos); } #endif #if (defined(VK_EXT_descriptor_buffer)) - void cmdSetDescriptorBufferOffsetsEXT(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const uint32_t* pBufferIndices, const VkDeviceSize* pOffsets) const noexcept { - fp_vkCmdSetDescriptorBufferOffsetsEXT(commandBuffer, pipelineBindPoint, layout, firstSet, setCount, pBufferIndices, pOffsets); + void cmdSetDescriptorBufferOffsetsEXT(VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t setCount, + const uint32_t* pBufferIndices, + const VkDeviceSize* pOffsets) const noexcept + { + fp_vkCmdSetDescriptorBufferOffsetsEXT( + commandBuffer, pipelineBindPoint, layout, firstSet, setCount, pBufferIndices, pOffsets); } #endif #if (defined(VK_EXT_descriptor_buffer)) - void cmdBindDescriptorBufferEmbeddedSamplersEXT(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set) const noexcept { + void cmdBindDescriptorBufferEmbeddedSamplersEXT(VkCommandBuffer commandBuffer, + VkPipelineBindPoint 
pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set) const noexcept + { fp_vkCmdBindDescriptorBufferEmbeddedSamplersEXT(commandBuffer, pipelineBindPoint, layout, set); } #endif #if (defined(VK_EXT_descriptor_buffer)) - VkResult getBufferOpaqueCaptureDescriptorDataEXT(const VkBufferCaptureDescriptorDataInfoEXT* pInfo, void* pData) const noexcept { + VkResult getBufferOpaqueCaptureDescriptorDataEXT(const VkBufferCaptureDescriptorDataInfoEXT* pInfo, + void* pData) const noexcept + { return fp_vkGetBufferOpaqueCaptureDescriptorDataEXT(device, pInfo, pData); } #endif #if (defined(VK_EXT_descriptor_buffer)) - VkResult getImageOpaqueCaptureDescriptorDataEXT(const VkImageCaptureDescriptorDataInfoEXT* pInfo, void* pData) const noexcept { + VkResult getImageOpaqueCaptureDescriptorDataEXT(const VkImageCaptureDescriptorDataInfoEXT* pInfo, + void* pData) const noexcept + { return fp_vkGetImageOpaqueCaptureDescriptorDataEXT(device, pInfo, pData); } #endif #if (defined(VK_EXT_descriptor_buffer)) - VkResult getImageViewOpaqueCaptureDescriptorDataEXT(const VkImageViewCaptureDescriptorDataInfoEXT* pInfo, void* pData) const noexcept { + VkResult getImageViewOpaqueCaptureDescriptorDataEXT(const VkImageViewCaptureDescriptorDataInfoEXT* pInfo, + void* pData) const noexcept + { return fp_vkGetImageViewOpaqueCaptureDescriptorDataEXT(device, pInfo, pData); } #endif #if (defined(VK_EXT_descriptor_buffer)) - VkResult getSamplerOpaqueCaptureDescriptorDataEXT(const VkSamplerCaptureDescriptorDataInfoEXT* pInfo, void* pData) const noexcept { + VkResult getSamplerOpaqueCaptureDescriptorDataEXT(const VkSamplerCaptureDescriptorDataInfoEXT* pInfo, + void* pData) const noexcept + { return fp_vkGetSamplerOpaqueCaptureDescriptorDataEXT(device, pInfo, pData); } #endif #if (defined(VK_EXT_descriptor_buffer)) - VkResult getAccelerationStructureOpaqueCaptureDescriptorDataEXT(const VkAccelerationStructureCaptureDescriptorDataInfoEXT* pInfo, void* pData) const noexcept { + VkResult getAccelerationStructureOpaqueCaptureDescriptorDataEXT( + const VkAccelerationStructureCaptureDescriptorDataInfoEXT* pInfo, void* pData) const noexcept + { return fp_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT(device, pInfo, pData); } #endif #if (defined(VK_EXT_pageable_device_local_memory)) - void setDeviceMemoryPriorityEXT(VkDeviceMemory memory, float priority) const noexcept { + void setDeviceMemoryPriorityEXT(VkDeviceMemory memory, float priority) const noexcept + { fp_vkSetDeviceMemoryPriorityEXT(device, memory, priority); } #endif #if (defined(VK_KHR_present_wait)) - VkResult waitForPresentKHR(VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout) const noexcept { + VkResult waitForPresentKHR(VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout) const noexcept + { return fp_vkWaitForPresentKHR(device, swapchain, presentId, timeout); } #endif #if (defined(VK_FUCHSIA_buffer_collection)) - VkResult createBufferCollectionFUCHSIA(const VkBufferCollectionCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferCollectionFUCHSIA* pCollection) const noexcept { + VkResult createBufferCollectionFUCHSIA(const VkBufferCollectionCreateInfoFUCHSIA* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBufferCollectionFUCHSIA* pCollection) const noexcept + { return fp_vkCreateBufferCollectionFUCHSIA(device, pCreateInfo, pAllocator, pCollection); } #endif #if (defined(VK_FUCHSIA_buffer_collection)) - VkResult setBufferCollectionBufferConstraintsFUCHSIA(VkBufferCollectionFUCHSIA collection, const 
VkBufferConstraintsInfoFUCHSIA* pBufferConstraintsInfo) const noexcept { + VkResult setBufferCollectionBufferConstraintsFUCHSIA( + VkBufferCollectionFUCHSIA collection, + const VkBufferConstraintsInfoFUCHSIA* pBufferConstraintsInfo) const noexcept + { return fp_vkSetBufferCollectionBufferConstraintsFUCHSIA(device, collection, pBufferConstraintsInfo); } #endif #if (defined(VK_FUCHSIA_buffer_collection)) - VkResult setBufferCollectionImageConstraintsFUCHSIA(VkBufferCollectionFUCHSIA collection, const VkImageConstraintsInfoFUCHSIA* pImageConstraintsInfo) const noexcept { + VkResult setBufferCollectionImageConstraintsFUCHSIA( + VkBufferCollectionFUCHSIA collection, const VkImageConstraintsInfoFUCHSIA* pImageConstraintsInfo) const noexcept + { return fp_vkSetBufferCollectionImageConstraintsFUCHSIA(device, collection, pImageConstraintsInfo); } #endif #if (defined(VK_FUCHSIA_buffer_collection)) - void destroyBufferCollectionFUCHSIA(VkBufferCollectionFUCHSIA collection, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyBufferCollectionFUCHSIA(VkBufferCollectionFUCHSIA collection, + const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyBufferCollectionFUCHSIA(device, collection, pAllocator); } #endif #if (defined(VK_FUCHSIA_buffer_collection)) - VkResult getBufferCollectionPropertiesFUCHSIA(VkBufferCollectionFUCHSIA collection, VkBufferCollectionPropertiesFUCHSIA* pProperties) const noexcept { + VkResult getBufferCollectionPropertiesFUCHSIA(VkBufferCollectionFUCHSIA collection, + VkBufferCollectionPropertiesFUCHSIA* pProperties) const noexcept + { return fp_vkGetBufferCollectionPropertiesFUCHSIA(device, collection, pProperties); } #endif #if (defined(VK_NV_cuda_kernel_launch)) - VkResult createCudaModuleNV(const VkCudaModuleCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCudaModuleNV* pModule) const noexcept { + VkResult createCudaModuleNV(const VkCudaModuleCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCudaModuleNV* pModule) const noexcept + { return fp_vkCreateCudaModuleNV(device, pCreateInfo, pAllocator, pModule); } #endif #if (defined(VK_NV_cuda_kernel_launch)) - VkResult getCudaModuleCacheNV(VkCudaModuleNV module, size_t* pCacheSize, void* pCacheData) const noexcept { + VkResult getCudaModuleCacheNV(VkCudaModuleNV module, size_t* pCacheSize, void* pCacheData) const noexcept + { return fp_vkGetCudaModuleCacheNV(device, module, pCacheSize, pCacheData); } #endif #if (defined(VK_NV_cuda_kernel_launch)) - VkResult createCudaFunctionNV(const VkCudaFunctionCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCudaFunctionNV* pFunction) const noexcept { + VkResult createCudaFunctionNV(const VkCudaFunctionCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCudaFunctionNV* pFunction) const noexcept + { return fp_vkCreateCudaFunctionNV(device, pCreateInfo, pAllocator, pFunction); } #endif #if (defined(VK_NV_cuda_kernel_launch)) - void destroyCudaModuleNV(VkCudaModuleNV module, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyCudaModuleNV(VkCudaModuleNV module, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyCudaModuleNV(device, module, pAllocator); } #endif #if (defined(VK_NV_cuda_kernel_launch)) - void destroyCudaFunctionNV(VkCudaFunctionNV function, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyCudaFunctionNV(VkCudaFunctionNV function, const VkAllocationCallbacks* pAllocator) const noexcept + { 
fp_vkDestroyCudaFunctionNV(device, function, pAllocator); } #endif #if (defined(VK_NV_cuda_kernel_launch)) - void cmdCudaLaunchKernelNV(VkCommandBuffer commandBuffer, const VkCudaLaunchInfoNV* pLaunchInfo) const noexcept { + void cmdCudaLaunchKernelNV(VkCommandBuffer commandBuffer, const VkCudaLaunchInfoNV* pLaunchInfo) const noexcept + { fp_vkCmdCudaLaunchKernelNV(commandBuffer, pLaunchInfo); } #endif #if (defined(VK_VERSION_1_3)) - void cmdBeginRendering(VkCommandBuffer commandBuffer, const VkRenderingInfoKHR* pRenderingInfo) const noexcept { + void cmdBeginRendering(VkCommandBuffer commandBuffer, const VkRenderingInfoKHR* pRenderingInfo) const noexcept + { fp_vkCmdBeginRendering(commandBuffer, pRenderingInfo); } #endif #if (defined(VK_VERSION_1_3)) - void cmdEndRendering(VkCommandBuffer commandBuffer) const noexcept { - fp_vkCmdEndRendering(commandBuffer); - } + void cmdEndRendering(VkCommandBuffer commandBuffer) const noexcept { fp_vkCmdEndRendering(commandBuffer); } #endif #if (defined(VK_VALVE_descriptor_set_host_mapping)) - void getDescriptorSetLayoutHostMappingInfoVALVE(const VkDescriptorSetBindingReferenceVALVE* pBindingReference, VkDescriptorSetLayoutHostMappingInfoVALVE* pHostMapping) const noexcept { + void + getDescriptorSetLayoutHostMappingInfoVALVE(const VkDescriptorSetBindingReferenceVALVE* pBindingReference, + VkDescriptorSetLayoutHostMappingInfoVALVE* pHostMapping) const noexcept + { fp_vkGetDescriptorSetLayoutHostMappingInfoVALVE(device, pBindingReference, pHostMapping); } #endif #if (defined(VK_VALVE_descriptor_set_host_mapping)) - void getDescriptorSetHostMappingVALVE(VkDescriptorSet descriptorSet, void** ppData) const noexcept { + void getDescriptorSetHostMappingVALVE(VkDescriptorSet descriptorSet, void** ppData) const noexcept + { fp_vkGetDescriptorSetHostMappingVALVE(device, descriptorSet, ppData); } #endif #if (defined(VK_EXT_opacity_micromap)) - VkResult createMicromapEXT(const VkMicromapCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkMicromapEXT* pMicromap) const noexcept { + VkResult createMicromapEXT(const VkMicromapCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkMicromapEXT* pMicromap) const noexcept + { return fp_vkCreateMicromapEXT(device, pCreateInfo, pAllocator, pMicromap); } #endif #if (defined(VK_EXT_opacity_micromap)) - void cmdBuildMicromapsEXT(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos) const noexcept { + void cmdBuildMicromapsEXT(VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkMicromapBuildInfoEXT* pInfos) const noexcept + { fp_vkCmdBuildMicromapsEXT(commandBuffer, infoCount, pInfos); } #endif #if (defined(VK_EXT_opacity_micromap)) - VkResult buildMicromapsEXT(VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos) const noexcept { + VkResult buildMicromapsEXT(VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkMicromapBuildInfoEXT* pInfos) const noexcept + { return fp_vkBuildMicromapsEXT(device, deferredOperation, infoCount, pInfos); } #endif #if (defined(VK_EXT_opacity_micromap)) - void destroyMicromapEXT(VkMicromapEXT micromap, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyMicromapEXT(VkMicromapEXT micromap, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyMicromapEXT(device, micromap, pAllocator); } #endif #if (defined(VK_EXT_opacity_micromap)) - void cmdCopyMicromapEXT(VkCommandBuffer commandBuffer, const 
VkCopyMicromapInfoEXT* pInfo) const noexcept { + void cmdCopyMicromapEXT(VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT* pInfo) const noexcept + { fp_vkCmdCopyMicromapEXT(commandBuffer, pInfo); } #endif #if (defined(VK_EXT_opacity_micromap)) - VkResult copyMicromapEXT(VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT* pInfo) const noexcept { + VkResult copyMicromapEXT(VkDeferredOperationKHR deferredOperation, + const VkCopyMicromapInfoEXT* pInfo) const noexcept + { return fp_vkCopyMicromapEXT(device, deferredOperation, pInfo); } #endif #if (defined(VK_EXT_opacity_micromap)) - void cmdCopyMicromapToMemoryEXT(VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT* pInfo) const noexcept { + void cmdCopyMicromapToMemoryEXT(VkCommandBuffer commandBuffer, + const VkCopyMicromapToMemoryInfoEXT* pInfo) const noexcept + { fp_vkCmdCopyMicromapToMemoryEXT(commandBuffer, pInfo); } #endif #if (defined(VK_EXT_opacity_micromap)) - VkResult copyMicromapToMemoryEXT(VkDeferredOperationKHR deferredOperation, const VkCopyMicromapToMemoryInfoEXT* pInfo) const noexcept { + VkResult copyMicromapToMemoryEXT(VkDeferredOperationKHR deferredOperation, + const VkCopyMicromapToMemoryInfoEXT* pInfo) const noexcept + { return fp_vkCopyMicromapToMemoryEXT(device, deferredOperation, pInfo); } #endif #if (defined(VK_EXT_opacity_micromap)) - void cmdCopyMemoryToMicromapEXT(VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT* pInfo) const noexcept { + void cmdCopyMemoryToMicromapEXT(VkCommandBuffer commandBuffer, + const VkCopyMemoryToMicromapInfoEXT* pInfo) const noexcept + { fp_vkCmdCopyMemoryToMicromapEXT(commandBuffer, pInfo); } #endif #if (defined(VK_EXT_opacity_micromap)) - VkResult copyMemoryToMicromapEXT(VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToMicromapInfoEXT* pInfo) const noexcept { + VkResult copyMemoryToMicromapEXT(VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToMicromapInfoEXT* pInfo) const noexcept + { return fp_vkCopyMemoryToMicromapEXT(device, deferredOperation, pInfo); } #endif #if (defined(VK_EXT_opacity_micromap)) - void cmdWriteMicromapsPropertiesEXT(VkCommandBuffer commandBuffer, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) const noexcept { + void cmdWriteMicromapsPropertiesEXT(VkCommandBuffer commandBuffer, + uint32_t micromapCount, + const VkMicromapEXT* pMicromaps, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery) const noexcept + { fp_vkCmdWriteMicromapsPropertiesEXT(commandBuffer, micromapCount, pMicromaps, queryType, queryPool, firstQuery); } #endif #if (defined(VK_EXT_opacity_micromap)) - VkResult writeMicromapsPropertiesEXT(uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, size_t dataSize, void* pData, size_t stride) const noexcept { + VkResult writeMicromapsPropertiesEXT(uint32_t micromapCount, + const VkMicromapEXT* pMicromaps, + VkQueryType queryType, + size_t dataSize, + void* pData, + size_t stride) const noexcept + { return fp_vkWriteMicromapsPropertiesEXT(device, micromapCount, pMicromaps, queryType, dataSize, pData, stride); } #endif #if (defined(VK_EXT_opacity_micromap)) - void getDeviceMicromapCompatibilityEXT(const VkMicromapVersionInfoEXT* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility) const noexcept { + void getDeviceMicromapCompatibilityEXT(const VkMicromapVersionInfoEXT* pVersionInfo, + VkAccelerationStructureCompatibilityKHR* 
pCompatibility) const noexcept + { fp_vkGetDeviceMicromapCompatibilityEXT(device, pVersionInfo, pCompatibility); } #endif #if (defined(VK_EXT_opacity_micromap)) - void getMicromapBuildSizesEXT(VkAccelerationStructureBuildTypeKHR buildType, const VkMicromapBuildInfoEXT* pBuildInfo, VkMicromapBuildSizesInfoEXT* pSizeInfo) const noexcept { + void getMicromapBuildSizesEXT(VkAccelerationStructureBuildTypeKHR buildType, + const VkMicromapBuildInfoEXT* pBuildInfo, + VkMicromapBuildSizesInfoEXT* pSizeInfo) const noexcept + { fp_vkGetMicromapBuildSizesEXT(device, buildType, pBuildInfo, pSizeInfo); } #endif #if (defined(VK_EXT_shader_module_identifier)) - void getShaderModuleIdentifierEXT(VkShaderModule shaderModule, VkShaderModuleIdentifierEXT* pIdentifier) const noexcept { + void getShaderModuleIdentifierEXT(VkShaderModule shaderModule, + VkShaderModuleIdentifierEXT* pIdentifier) const noexcept + { fp_vkGetShaderModuleIdentifierEXT(device, shaderModule, pIdentifier); } #endif #if (defined(VK_EXT_shader_module_identifier)) - void getShaderModuleCreateInfoIdentifierEXT(const VkShaderModuleCreateInfo* pCreateInfo, VkShaderModuleIdentifierEXT* pIdentifier) const noexcept { + void getShaderModuleCreateInfoIdentifierEXT(const VkShaderModuleCreateInfo* pCreateInfo, + VkShaderModuleIdentifierEXT* pIdentifier) const noexcept + { fp_vkGetShaderModuleCreateInfoIdentifierEXT(device, pCreateInfo, pIdentifier); } #endif #if (defined(VK_KHR_maintenance5)) - void getImageSubresourceLayout2KHR(VkImage image, const VkImageSubresource2EXT* pSubresource, VkSubresourceLayout2EXT* pLayout) const noexcept { + void getImageSubresourceLayout2KHR(VkImage image, + const VkImageSubresource2EXT* pSubresource, + VkSubresourceLayout2EXT* pLayout) const noexcept + { fp_vkGetImageSubresourceLayout2KHR(device, image, pSubresource, pLayout); } #endif #if (defined(VK_EXT_pipeline_properties)) - VkResult getPipelinePropertiesEXT(const VkPipelineInfoEXT* pPipelineInfo, VkBaseOutStructure* pPipelineProperties) const noexcept { + VkResult getPipelinePropertiesEXT(const VkPipelineInfoEXT* pPipelineInfo, + VkBaseOutStructure* pPipelineProperties) const noexcept + { return fp_vkGetPipelinePropertiesEXT(device, pPipelineInfo, pPipelineProperties); } #endif #if (defined(VK_EXT_metal_objects)) - void exportMetalObjectsEXT(VkExportMetalObjectsInfoEXT* pMetalObjectsInfo) const noexcept { + void exportMetalObjectsEXT(VkExportMetalObjectsInfoEXT* pMetalObjectsInfo) const noexcept + { fp_vkExportMetalObjectsEXT(device, pMetalObjectsInfo); } #endif #if (defined(VK_QCOM_tile_properties)) - VkResult getFramebufferTilePropertiesQCOM(VkFramebuffer framebuffer, uint32_t* pPropertiesCount, VkTilePropertiesQCOM* pProperties) const noexcept { + VkResult getFramebufferTilePropertiesQCOM(VkFramebuffer framebuffer, + uint32_t* pPropertiesCount, + VkTilePropertiesQCOM* pProperties) const noexcept + { return fp_vkGetFramebufferTilePropertiesQCOM(device, framebuffer, pPropertiesCount, pProperties); } #endif #if (defined(VK_QCOM_tile_properties)) - VkResult getDynamicRenderingTilePropertiesQCOM(const VkRenderingInfoKHR* pRenderingInfo, VkTilePropertiesQCOM* pProperties) const noexcept { + VkResult getDynamicRenderingTilePropertiesQCOM(const VkRenderingInfoKHR* pRenderingInfo, + VkTilePropertiesQCOM* pProperties) const noexcept + { return fp_vkGetDynamicRenderingTilePropertiesQCOM(device, pRenderingInfo, pProperties); } #endif #if (defined(VK_NV_optical_flow)) - VkResult createOpticalFlowSessionNV(const VkOpticalFlowSessionCreateInfoNV* pCreateInfo, const 
VkAllocationCallbacks* pAllocator, VkOpticalFlowSessionNV* pSession) const noexcept { + VkResult createOpticalFlowSessionNV(const VkOpticalFlowSessionCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkOpticalFlowSessionNV* pSession) const noexcept + { return fp_vkCreateOpticalFlowSessionNV(device, pCreateInfo, pAllocator, pSession); } #endif #if (defined(VK_NV_optical_flow)) - void destroyOpticalFlowSessionNV(VkOpticalFlowSessionNV session, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyOpticalFlowSessionNV(VkOpticalFlowSessionNV session, + const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyOpticalFlowSessionNV(device, session, pAllocator); } #endif #if (defined(VK_NV_optical_flow)) - VkResult bindOpticalFlowSessionImageNV(VkOpticalFlowSessionNV session, VkOpticalFlowSessionBindingPointNV bindingPoint, VkImageView view, VkImageLayout layout) const noexcept { + VkResult bindOpticalFlowSessionImageNV(VkOpticalFlowSessionNV session, + VkOpticalFlowSessionBindingPointNV bindingPoint, + VkImageView view, + VkImageLayout layout) const noexcept + { return fp_vkBindOpticalFlowSessionImageNV(device, session, bindingPoint, view, layout); } #endif #if (defined(VK_NV_optical_flow)) - void cmdOpticalFlowExecuteNV(VkCommandBuffer commandBuffer, VkOpticalFlowSessionNV session, const VkOpticalFlowExecuteInfoNV* pExecuteInfo) const noexcept { + void cmdOpticalFlowExecuteNV(VkCommandBuffer commandBuffer, + VkOpticalFlowSessionNV session, + const VkOpticalFlowExecuteInfoNV* pExecuteInfo) const noexcept + { fp_vkCmdOpticalFlowExecuteNV(commandBuffer, session, pExecuteInfo); } #endif #if (defined(VK_EXT_device_fault)) - VkResult getDeviceFaultInfoEXT(VkDeviceFaultCountsEXT* pFaultCounts, VkDeviceFaultInfoEXT* pFaultInfo) const noexcept { + VkResult getDeviceFaultInfoEXT(VkDeviceFaultCountsEXT* pFaultCounts, + VkDeviceFaultInfoEXT* pFaultInfo) const noexcept + { return fp_vkGetDeviceFaultInfoEXT(device, pFaultCounts, pFaultInfo); } #endif #if (defined(VK_EXT_depth_bias_control)) - void cmdSetDepthBias2EXT(VkCommandBuffer commandBuffer, const VkDepthBiasInfoEXT* pDepthBiasInfo) const noexcept { + void cmdSetDepthBias2EXT(VkCommandBuffer commandBuffer, const VkDepthBiasInfoEXT* pDepthBiasInfo) const noexcept + { fp_vkCmdSetDepthBias2EXT(commandBuffer, pDepthBiasInfo); } #endif #if (defined(VK_EXT_swapchain_maintenance1)) - VkResult releaseSwapchainImagesEXT(const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo) const noexcept { + VkResult releaseSwapchainImagesEXT(const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo) const noexcept + { return fp_vkReleaseSwapchainImagesEXT(device, pReleaseInfo); } #endif #if (defined(VK_KHR_maintenance5)) - void getDeviceImageSubresourceLayoutKHR(const VkDeviceImageSubresourceInfoKHR* pInfo, VkSubresourceLayout2EXT* pLayout) const noexcept { + void getDeviceImageSubresourceLayoutKHR(const VkDeviceImageSubresourceInfoKHR* pInfo, + VkSubresourceLayout2EXT* pLayout) const noexcept + { fp_vkGetDeviceImageSubresourceLayoutKHR(device, pInfo, pLayout); } #endif #if (defined(VK_KHR_map_memory2)) - VkResult mapMemory2KHR(const VkMemoryMapInfoKHR* pMemoryMapInfo, void** ppData) const noexcept { + VkResult mapMemory2KHR(const VkMemoryMapInfoKHR* pMemoryMapInfo, void** ppData) const noexcept + { return fp_vkMapMemory2KHR(device, pMemoryMapInfo, ppData); } #endif #if (defined(VK_KHR_map_memory2)) - VkResult unmapMemory2KHR(const VkMemoryUnmapInfoKHR* pMemoryUnmapInfo) const noexcept { + VkResult unmapMemory2KHR(const 
VkMemoryUnmapInfoKHR* pMemoryUnmapInfo) const noexcept + { return fp_vkUnmapMemory2KHR(device, pMemoryUnmapInfo); } #endif #if (defined(VK_EXT_shader_object)) - VkResult createShadersEXT(uint32_t createInfoCount, const VkShaderCreateInfoEXT* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkShaderEXT* pShaders) const noexcept { + VkResult createShadersEXT(uint32_t createInfoCount, + const VkShaderCreateInfoEXT* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkShaderEXT* pShaders) const noexcept + { return fp_vkCreateShadersEXT(device, createInfoCount, pCreateInfos, pAllocator, pShaders); } #endif #if (defined(VK_EXT_shader_object)) - void destroyShaderEXT(VkShaderEXT shader, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyShaderEXT(VkShaderEXT shader, const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyShaderEXT(device, shader, pAllocator); } #endif #if (defined(VK_EXT_shader_object)) - VkResult getShaderBinaryDataEXT(VkShaderEXT shader, size_t* pDataSize, void* pData) const noexcept { + VkResult getShaderBinaryDataEXT(VkShaderEXT shader, size_t* pDataSize, void* pData) const noexcept + { return fp_vkGetShaderBinaryDataEXT(device, shader, pDataSize, pData); } #endif #if (defined(VK_EXT_shader_object)) - void cmdBindShadersEXT(VkCommandBuffer commandBuffer, uint32_t stageCount, const VkShaderStageFlagBits* pStages, const VkShaderEXT* pShaders) const noexcept { + void cmdBindShadersEXT(VkCommandBuffer commandBuffer, + uint32_t stageCount, + const VkShaderStageFlagBits* pStages, + const VkShaderEXT* pShaders) const noexcept + { fp_vkCmdBindShadersEXT(commandBuffer, stageCount, pStages, pShaders); } #endif #if (defined(VK_QNX_external_memory_screen_buffer)) - VkResult getScreenBufferPropertiesQNX(const struct _screen_buffer* buffer, VkScreenBufferPropertiesQNX* pProperties) const noexcept { + VkResult getScreenBufferPropertiesQNX(const struct _screen_buffer* buffer, + VkScreenBufferPropertiesQNX* pProperties) const noexcept + { return fp_vkGetScreenBufferPropertiesQNX(device, buffer, pProperties); } #endif #if (defined(VK_AMDX_shader_enqueue)) - VkResult getExecutionGraphPipelineScratchSizeAMDX(VkPipeline executionGraph, VkExecutionGraphPipelineScratchSizeAMDX* pSizeInfo) const noexcept { + VkResult getExecutionGraphPipelineScratchSizeAMDX(VkPipeline executionGraph, + VkExecutionGraphPipelineScratchSizeAMDX* pSizeInfo) const noexcept + { return fp_vkGetExecutionGraphPipelineScratchSizeAMDX(device, executionGraph, pSizeInfo); } #endif #if (defined(VK_AMDX_shader_enqueue)) - VkResult getExecutionGraphPipelineNodeIndexAMDX(VkPipeline executionGraph, const VkPipelineShaderStageNodeCreateInfoAMDX* pNodeInfo, uint32_t* pNodeIndex) const noexcept { + VkResult getExecutionGraphPipelineNodeIndexAMDX(VkPipeline executionGraph, + const VkPipelineShaderStageNodeCreateInfoAMDX* pNodeInfo, + uint32_t* pNodeIndex) const noexcept + { return fp_vkGetExecutionGraphPipelineNodeIndexAMDX(device, executionGraph, pNodeInfo, pNodeIndex); } #endif #if (defined(VK_AMDX_shader_enqueue)) - VkResult createExecutionGraphPipelinesAMDX(VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkExecutionGraphPipelineCreateInfoAMDX* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) const noexcept { - return fp_vkCreateExecutionGraphPipelinesAMDX(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); + VkResult createExecutionGraphPipelinesAMDX(VkPipelineCache pipelineCache, + uint32_t createInfoCount, + 
const VkExecutionGraphPipelineCreateInfoAMDX* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines) const noexcept + { + return fp_vkCreateExecutionGraphPipelinesAMDX( + device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); } #endif #if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 - void cmdInitializeGraphScratchMemoryAMDX(VkCommandBuffer commandBuffer, VkDeviceAddress scratch) const noexcept { + void cmdInitializeGraphScratchMemoryAMDX(VkCommandBuffer commandBuffer, VkDeviceAddress scratch) const noexcept + { fp_vkCmdInitializeGraphScratchMemoryAMDX(commandBuffer, scratch); } #endif #if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 - void cmdDispatchGraphAMDX(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, const VkDispatchGraphCountInfoAMDX* pCountInfo) const noexcept { + void cmdDispatchGraphAMDX(VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + const VkDispatchGraphCountInfoAMDX* pCountInfo) const noexcept + { fp_vkCmdDispatchGraphAMDX(commandBuffer, scratch, pCountInfo); } #endif #if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 - void cmdDispatchGraphIndirectAMDX(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, const VkDispatchGraphCountInfoAMDX* pCountInfo) const noexcept { + void cmdDispatchGraphIndirectAMDX(VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + const VkDispatchGraphCountInfoAMDX* pCountInfo) const noexcept + { fp_vkCmdDispatchGraphIndirectAMDX(commandBuffer, scratch, pCountInfo); } #endif #if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 - void cmdDispatchGraphIndirectCountAMDX(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceAddress countInfo) const noexcept { + void cmdDispatchGraphIndirectCountAMDX(VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + VkDeviceAddress countInfo) const noexcept + { fp_vkCmdDispatchGraphIndirectCountAMDX(commandBuffer, scratch, countInfo); } #endif #if (defined(VK_KHR_maintenance6)) - void cmdBindDescriptorSets2KHR(VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfoKHR* pBindDescriptorSetsInfo) const noexcept { + void cmdBindDescriptorSets2KHR(VkCommandBuffer commandBuffer, + const VkBindDescriptorSetsInfoKHR* pBindDescriptorSetsInfo) const noexcept + { fp_vkCmdBindDescriptorSets2KHR(commandBuffer, pBindDescriptorSetsInfo); } #endif #if (defined(VK_KHR_maintenance6)) - void cmdPushConstants2KHR(VkCommandBuffer commandBuffer, const VkPushConstantsInfoKHR* pPushConstantsInfo) const noexcept { + void cmdPushConstants2KHR(VkCommandBuffer commandBuffer, + const VkPushConstantsInfoKHR* pPushConstantsInfo) const noexcept + { fp_vkCmdPushConstants2KHR(commandBuffer, pPushConstantsInfo); } #endif #if (defined(VK_KHR_maintenance6)) - void cmdPushDescriptorSet2KHR(VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfoKHR* pPushDescriptorSetInfo) const noexcept { + void cmdPushDescriptorSet2KHR(VkCommandBuffer commandBuffer, + const VkPushDescriptorSetInfoKHR* pPushDescriptorSetInfo) const noexcept + { fp_vkCmdPushDescriptorSet2KHR(commandBuffer, pPushDescriptorSetInfo); } #endif #if (defined(VK_KHR_maintenance6)) - void cmdPushDescriptorSetWithTemplate2KHR(VkCommandBuffer commandBuffer, const VkPushDescriptorSetWithTemplateInfoKHR* pPushDescriptorSetWithTemplateInfo) const noexcept { + void cmdPushDescriptorSetWithTemplate2KHR( + VkCommandBuffer commandBuffer, + const VkPushDescriptorSetWithTemplateInfoKHR* pPushDescriptorSetWithTemplateInfo) const 
noexcept + { fp_vkCmdPushDescriptorSetWithTemplate2KHR(commandBuffer, pPushDescriptorSetWithTemplateInfo); } #endif #if (defined(VK_KHR_maintenance6)) - void cmdSetDescriptorBufferOffsets2EXT(VkCommandBuffer commandBuffer, const VkSetDescriptorBufferOffsetsInfoEXT* pSetDescriptorBufferOffsetsInfo) const noexcept { + void cmdSetDescriptorBufferOffsets2EXT( + VkCommandBuffer commandBuffer, + const VkSetDescriptorBufferOffsetsInfoEXT* pSetDescriptorBufferOffsetsInfo) const noexcept + { fp_vkCmdSetDescriptorBufferOffsets2EXT(commandBuffer, pSetDescriptorBufferOffsetsInfo); } #endif #if (defined(VK_KHR_maintenance6)) - void cmdBindDescriptorBufferEmbeddedSamplers2EXT(VkCommandBuffer commandBuffer, const VkBindDescriptorBufferEmbeddedSamplersInfoEXT* pBindDescriptorBufferEmbeddedSamplersInfo) const noexcept { + void cmdBindDescriptorBufferEmbeddedSamplers2EXT( + VkCommandBuffer commandBuffer, + const VkBindDescriptorBufferEmbeddedSamplersInfoEXT* pBindDescriptorBufferEmbeddedSamplersInfo) const noexcept + { fp_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT(commandBuffer, pBindDescriptorBufferEmbeddedSamplersInfo); } #endif #if (defined(VK_NV_low_latency2)) - VkResult setLatencySleepModeNV(VkSwapchainKHR swapchain, const VkLatencySleepModeInfoNV* pSleepModeInfo) const noexcept { + VkResult setLatencySleepModeNV(VkSwapchainKHR swapchain, + const VkLatencySleepModeInfoNV* pSleepModeInfo) const noexcept + { return fp_vkSetLatencySleepModeNV(device, swapchain, pSleepModeInfo); } #endif #if (defined(VK_NV_low_latency2)) - VkResult latencySleepNV(VkSwapchainKHR swapchain, const VkLatencySleepInfoNV* pSleepInfo) const noexcept { + VkResult latencySleepNV(VkSwapchainKHR swapchain, const VkLatencySleepInfoNV* pSleepInfo) const noexcept + { return fp_vkLatencySleepNV(device, swapchain, pSleepInfo); } #endif #if (defined(VK_NV_low_latency2)) - void setLatencyMarkerNV(VkSwapchainKHR swapchain, const VkSetLatencyMarkerInfoNV* pLatencyMarkerInfo) const noexcept { + void setLatencyMarkerNV(VkSwapchainKHR swapchain, const VkSetLatencyMarkerInfoNV* pLatencyMarkerInfo) const noexcept + { fp_vkSetLatencyMarkerNV(device, swapchain, pLatencyMarkerInfo); } #endif #if ((defined(VK_NV_low_latency2))) && VK_HEADER_VERSION >= 271 - void getLatencyTimingsNV(VkSwapchainKHR swapchain, VkGetLatencyMarkerInfoNV* pLatencyMarkerInfo) const noexcept { + void getLatencyTimingsNV(VkSwapchainKHR swapchain, VkGetLatencyMarkerInfoNV* pLatencyMarkerInfo) const noexcept + { fp_vkGetLatencyTimingsNV(device, swapchain, pLatencyMarkerInfo); } #endif #if (defined(VK_NV_low_latency2)) - void queueNotifyOutOfBandNV(VkQueue queue, const VkOutOfBandQueueTypeInfoNV* pQueueTypeInfo) const noexcept { + void queueNotifyOutOfBandNV(VkQueue queue, const VkOutOfBandQueueTypeInfoNV* pQueueTypeInfo) const noexcept + { fp_vkQueueNotifyOutOfBandNV(queue, pQueueTypeInfo); } #endif #if (defined(VK_KHR_dynamic_rendering_local_read)) - void cmdSetRenderingAttachmentLocationsKHR(VkCommandBuffer commandBuffer, const VkRenderingAttachmentLocationInfoKHR* pLocationInfo) const noexcept { + void cmdSetRenderingAttachmentLocationsKHR(VkCommandBuffer commandBuffer, + const VkRenderingAttachmentLocationInfoKHR* pLocationInfo) const noexcept + { fp_vkCmdSetRenderingAttachmentLocationsKHR(commandBuffer, pLocationInfo); } #endif #if (defined(VK_KHR_dynamic_rendering_local_read)) - void cmdSetRenderingInputAttachmentIndicesKHR(VkCommandBuffer commandBuffer, const VkRenderingInputAttachmentIndexInfoKHR* pInputAttachmentIndexInfo) const noexcept { + void 
cmdSetRenderingInputAttachmentIndicesKHR( + VkCommandBuffer commandBuffer, + const VkRenderingInputAttachmentIndexInfoKHR* pInputAttachmentIndexInfo) const noexcept + { fp_vkCmdSetRenderingInputAttachmentIndicesKHR(commandBuffer, pInputAttachmentIndexInfo); } #endif #if (defined(VK_EXT_shader_object)) || (defined(VK_EXT_depth_clamp_control)) - void cmdSetDepthClampRangeEXT(VkCommandBuffer commandBuffer, VkDepthClampModeEXT depthClampMode, const VkDepthClampRangeEXT* pDepthClampRange) const noexcept { + void cmdSetDepthClampRangeEXT(VkCommandBuffer commandBuffer, + VkDepthClampModeEXT depthClampMode, + const VkDepthClampRangeEXT* pDepthClampRange) const noexcept + { fp_vkCmdSetDepthClampRangeEXT(commandBuffer, depthClampMode, pDepthClampRange); } #endif #if (defined(VK_EXT_host_query_reset)) - void resetQueryPoolEXT(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) const noexcept { + void resetQueryPoolEXT(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) const noexcept + { fp_vkResetQueryPoolEXT(device, queryPool, firstQuery, queryCount); } #endif #if (defined(VK_KHR_maintenance1)) - void trimCommandPoolKHR(VkCommandPool commandPool, VkCommandPoolTrimFlagsKHR flags) const noexcept { + void trimCommandPoolKHR(VkCommandPool commandPool, VkCommandPoolTrimFlagsKHR flags) const noexcept + { fp_vkTrimCommandPoolKHR(device, commandPool, flags); } #endif #if (defined(VK_KHR_device_group)) - void getDeviceGroupPeerMemoryFeaturesKHR(uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlagsKHR* pPeerMemoryFeatures) const noexcept { - fp_vkGetDeviceGroupPeerMemoryFeaturesKHR(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures); + void getDeviceGroupPeerMemoryFeaturesKHR(uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlagsKHR* pPeerMemoryFeatures) const noexcept + { + fp_vkGetDeviceGroupPeerMemoryFeaturesKHR( + device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures); } #endif #if (defined(VK_KHR_bind_memory2)) - VkResult bindBufferMemory2KHR(uint32_t bindInfoCount, const VkBindBufferMemoryInfoKHR* pBindInfos) const noexcept { + VkResult bindBufferMemory2KHR(uint32_t bindInfoCount, const VkBindBufferMemoryInfoKHR* pBindInfos) const noexcept + { return fp_vkBindBufferMemory2KHR(device, bindInfoCount, pBindInfos); } #endif #if (defined(VK_KHR_bind_memory2)) - VkResult bindImageMemory2KHR(uint32_t bindInfoCount, const VkBindImageMemoryInfoKHR* pBindInfos) const noexcept { + VkResult bindImageMemory2KHR(uint32_t bindInfoCount, const VkBindImageMemoryInfoKHR* pBindInfos) const noexcept + { return fp_vkBindImageMemory2KHR(device, bindInfoCount, pBindInfos); } #endif #if (defined(VK_KHR_device_group)) - void cmdSetDeviceMaskKHR(VkCommandBuffer commandBuffer, uint32_t deviceMask) const noexcept { + void cmdSetDeviceMaskKHR(VkCommandBuffer commandBuffer, uint32_t deviceMask) const noexcept + { fp_vkCmdSetDeviceMaskKHR(commandBuffer, deviceMask); } #endif #if (defined(VK_KHR_device_group)) - void cmdDispatchBaseKHR(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) const noexcept { - fp_vkCmdDispatchBaseKHR(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ); + void cmdDispatchBaseKHR(VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t 
groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ) const noexcept + { + fp_vkCmdDispatchBaseKHR( + commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ); } #endif #if (defined(VK_KHR_descriptor_update_template)) - VkResult createDescriptorUpdateTemplateKHR(const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate) const noexcept { + VkResult createDescriptorUpdateTemplateKHR(const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate) const noexcept + { return fp_vkCreateDescriptorUpdateTemplateKHR(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate); } #endif #if (defined(VK_KHR_descriptor_update_template)) - void destroyDescriptorUpdateTemplateKHR(VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyDescriptorUpdateTemplateKHR(VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, + const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator); } #endif #if (defined(VK_KHR_descriptor_update_template)) - void updateDescriptorSetWithTemplateKHR(VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void* pData) const noexcept { + void updateDescriptorSetWithTemplateKHR(VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, + const void* pData) const noexcept + { fp_vkUpdateDescriptorSetWithTemplateKHR(device, descriptorSet, descriptorUpdateTemplate, pData); } #endif #if (defined(VK_KHR_get_memory_requirements2)) - void getBufferMemoryRequirements2KHR(const VkBufferMemoryRequirementsInfo2KHR* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + void getBufferMemoryRequirements2KHR(const VkBufferMemoryRequirementsInfo2KHR* pInfo, + VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept + { fp_vkGetBufferMemoryRequirements2KHR(device, pInfo, pMemoryRequirements); } #endif #if (defined(VK_KHR_get_memory_requirements2)) - void getImageMemoryRequirements2KHR(const VkImageMemoryRequirementsInfo2KHR* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + void getImageMemoryRequirements2KHR(const VkImageMemoryRequirementsInfo2KHR* pInfo, + VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept + { fp_vkGetImageMemoryRequirements2KHR(device, pInfo, pMemoryRequirements); } #endif #if (defined(VK_KHR_get_memory_requirements2)) - void getImageSparseMemoryRequirements2KHR(const VkImageSparseMemoryRequirementsInfo2KHR* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements) const noexcept { - fp_vkGetImageSparseMemoryRequirements2KHR(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements); + void + getImageSparseMemoryRequirements2KHR(const VkImageSparseMemoryRequirementsInfo2KHR* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements) const noexcept + { + fp_vkGetImageSparseMemoryRequirements2KHR( + device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements); } #endif #if (defined(VK_KHR_maintenance4)) - void getDeviceBufferMemoryRequirementsKHR(const VkDeviceBufferMemoryRequirementsKHR* pInfo, VkMemoryRequirements2KHR* 
pMemoryRequirements) const noexcept { + void getDeviceBufferMemoryRequirementsKHR(const VkDeviceBufferMemoryRequirementsKHR* pInfo, + VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept + { fp_vkGetDeviceBufferMemoryRequirementsKHR(device, pInfo, pMemoryRequirements); } #endif #if (defined(VK_KHR_maintenance4)) - void getDeviceImageMemoryRequirementsKHR(const VkDeviceImageMemoryRequirementsKHR* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept { + void getDeviceImageMemoryRequirementsKHR(const VkDeviceImageMemoryRequirementsKHR* pInfo, + VkMemoryRequirements2KHR* pMemoryRequirements) const noexcept + { fp_vkGetDeviceImageMemoryRequirementsKHR(device, pInfo, pMemoryRequirements); } #endif #if (defined(VK_KHR_maintenance4)) - void getDeviceImageSparseMemoryRequirementsKHR(const VkDeviceImageMemoryRequirementsKHR* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements) const noexcept { - fp_vkGetDeviceImageSparseMemoryRequirementsKHR(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements); + void getDeviceImageSparseMemoryRequirementsKHR( + const VkDeviceImageMemoryRequirementsKHR* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements) const noexcept + { + fp_vkGetDeviceImageSparseMemoryRequirementsKHR( + device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements); } #endif #if (defined(VK_KHR_sampler_ycbcr_conversion)) - VkResult createSamplerYcbcrConversionKHR(const VkSamplerYcbcrConversionCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversionKHR* pYcbcrConversion) const noexcept { + VkResult createSamplerYcbcrConversionKHR(const VkSamplerYcbcrConversionCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSamplerYcbcrConversionKHR* pYcbcrConversion) const noexcept + { return fp_vkCreateSamplerYcbcrConversionKHR(device, pCreateInfo, pAllocator, pYcbcrConversion); } #endif #if (defined(VK_KHR_sampler_ycbcr_conversion)) - void destroySamplerYcbcrConversionKHR(VkSamplerYcbcrConversionKHR ycbcrConversion, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroySamplerYcbcrConversionKHR(VkSamplerYcbcrConversionKHR ycbcrConversion, + const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroySamplerYcbcrConversionKHR(device, ycbcrConversion, pAllocator); } #endif #if (defined(VK_KHR_maintenance3)) - void getDescriptorSetLayoutSupportKHR(const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupportKHR* pSupport) const noexcept { + void getDescriptorSetLayoutSupportKHR(const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + VkDescriptorSetLayoutSupportKHR* pSupport) const noexcept + { fp_vkGetDescriptorSetLayoutSupportKHR(device, pCreateInfo, pSupport); } #endif #if (defined(VK_EXT_calibrated_timestamps)) - VkResult getCalibratedTimestampsEXT(uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation) const noexcept { + VkResult getCalibratedTimestampsEXT(uint32_t timestampCount, + const VkCalibratedTimestampInfoEXT* pTimestampInfos, + uint64_t* pTimestamps, + uint64_t* pMaxDeviation) const noexcept + { return fp_vkGetCalibratedTimestampsEXT(device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation); } #endif #if (defined(VK_KHR_create_renderpass2)) - VkResult createRenderPass2KHR(const VkRenderPassCreateInfo2KHR* pCreateInfo, const 
VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) const noexcept { + VkResult createRenderPass2KHR(const VkRenderPassCreateInfo2KHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkRenderPass* pRenderPass) const noexcept + { return fp_vkCreateRenderPass2KHR(device, pCreateInfo, pAllocator, pRenderPass); } #endif #if (defined(VK_KHR_create_renderpass2)) - void cmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfoKHR* pSubpassBeginInfo) const noexcept { + void cmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo* pRenderPassBegin, + const VkSubpassBeginInfoKHR* pSubpassBeginInfo) const noexcept + { fp_vkCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo); } #endif #if (defined(VK_KHR_create_renderpass2)) - void cmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR* pSubpassBeginInfo, const VkSubpassEndInfoKHR* pSubpassEndInfo) const noexcept { + void cmdNextSubpass2KHR(VkCommandBuffer commandBuffer, + const VkSubpassBeginInfoKHR* pSubpassBeginInfo, + const VkSubpassEndInfoKHR* pSubpassEndInfo) const noexcept + { fp_vkCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo); } #endif #if (defined(VK_KHR_create_renderpass2)) - void cmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo) const noexcept { + void cmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo) const noexcept + { fp_vkCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo); } #endif #if (defined(VK_KHR_timeline_semaphore)) - VkResult getSemaphoreCounterValueKHR(VkSemaphore semaphore, uint64_t* pValue) const noexcept { + VkResult getSemaphoreCounterValueKHR(VkSemaphore semaphore, uint64_t* pValue) const noexcept + { return fp_vkGetSemaphoreCounterValueKHR(device, semaphore, pValue); } #endif #if (defined(VK_KHR_timeline_semaphore)) - VkResult waitSemaphoresKHR(const VkSemaphoreWaitInfoKHR* pWaitInfo, uint64_t timeout) const noexcept { + VkResult waitSemaphoresKHR(const VkSemaphoreWaitInfoKHR* pWaitInfo, uint64_t timeout) const noexcept + { return fp_vkWaitSemaphoresKHR(device, pWaitInfo, timeout); } #endif #if (defined(VK_KHR_timeline_semaphore)) - VkResult signalSemaphoreKHR(const VkSemaphoreSignalInfoKHR* pSignalInfo) const noexcept { + VkResult signalSemaphoreKHR(const VkSemaphoreSignalInfoKHR* pSignalInfo) const noexcept + { return fp_vkSignalSemaphoreKHR(device, pSignalInfo); } #endif #if (defined(VK_AMD_draw_indirect_count)) - void cmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) const noexcept { - fp_vkCmdDrawIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); + void cmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride) const noexcept + { + fp_vkCmdDrawIndirectCountAMD( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); } #endif #if (defined(VK_AMD_draw_indirect_count)) - void cmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) const noexcept { - 
fp_vkCmdDrawIndexedIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); + void cmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride) const noexcept + { + fp_vkCmdDrawIndexedIndirectCountAMD( + commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); } #endif #if (defined(VK_NV_ray_tracing)) - VkResult getRayTracingShaderGroupHandlesNV(VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData) const noexcept { + VkResult getRayTracingShaderGroupHandlesNV( + VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData) const noexcept + { return fp_vkGetRayTracingShaderGroupHandlesNV(device, pipeline, firstGroup, groupCount, dataSize, pData); } #endif #if (defined(VK_KHR_buffer_device_address)) - uint64_t getBufferOpaqueCaptureAddressKHR(const VkBufferDeviceAddressInfoEXT* pInfo) const noexcept { + uint64_t getBufferOpaqueCaptureAddressKHR(const VkBufferDeviceAddressInfoEXT* pInfo) const noexcept + { return fp_vkGetBufferOpaqueCaptureAddressKHR(device, pInfo); } #endif #if (defined(VK_EXT_buffer_device_address)) - VkDeviceAddress getBufferDeviceAddressEXT(const VkBufferDeviceAddressInfoEXT* pInfo) const noexcept { + VkDeviceAddress getBufferDeviceAddressEXT(const VkBufferDeviceAddressInfoEXT* pInfo) const noexcept + { return fp_vkGetBufferDeviceAddressEXT(device, pInfo); } #endif #if (defined(VK_KHR_buffer_device_address)) - uint64_t getDeviceMemoryOpaqueCaptureAddressKHR(const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo) const noexcept { + uint64_t + getDeviceMemoryOpaqueCaptureAddressKHR(const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo) const noexcept + { return fp_vkGetDeviceMemoryOpaqueCaptureAddressKHR(device, pInfo); } #endif #if (defined(VK_EXT_line_rasterization)) - void cmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern) const noexcept { + void cmdSetLineStippleEXT(VkCommandBuffer commandBuffer, + uint32_t lineStippleFactor, + uint16_t lineStipplePattern) const noexcept + { fp_vkCmdSetLineStippleEXT(commandBuffer, lineStippleFactor, lineStipplePattern); } #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - void cmdSetCullModeEXT(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode) const noexcept { + void cmdSetCullModeEXT(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode) const noexcept + { fp_vkCmdSetCullModeEXT(commandBuffer, cullMode); } #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - void cmdSetFrontFaceEXT(VkCommandBuffer commandBuffer, VkFrontFace frontFace) const noexcept { + void cmdSetFrontFaceEXT(VkCommandBuffer commandBuffer, VkFrontFace frontFace) const noexcept + { fp_vkCmdSetFrontFaceEXT(commandBuffer, frontFace); } #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - void cmdSetPrimitiveTopologyEXT(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology) const noexcept { + void cmdSetPrimitiveTopologyEXT(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology) const noexcept + { fp_vkCmdSetPrimitiveTopologyEXT(commandBuffer, primitiveTopology); } #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - void 
cmdSetViewportWithCountEXT(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports) const noexcept { + void cmdSetViewportWithCountEXT(VkCommandBuffer commandBuffer, + uint32_t viewportCount, + const VkViewport* pViewports) const noexcept + { fp_vkCmdSetViewportWithCountEXT(commandBuffer, viewportCount, pViewports); } #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - void cmdSetScissorWithCountEXT(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors) const noexcept { + void cmdSetScissorWithCountEXT(VkCommandBuffer commandBuffer, + uint32_t scissorCount, + const VkRect2D* pScissors) const noexcept + { fp_vkCmdSetScissorWithCountEXT(commandBuffer, scissorCount, pScissors); } #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - void cmdBindVertexBuffers2EXT(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides) const noexcept { + void cmdBindVertexBuffers2EXT(VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets, + const VkDeviceSize* pSizes, + const VkDeviceSize* pStrides) const noexcept + { fp_vkCmdBindVertexBuffers2EXT(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides); } #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - void cmdSetDepthTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable) const noexcept { + void cmdSetDepthTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable) const noexcept + { fp_vkCmdSetDepthTestEnableEXT(commandBuffer, depthTestEnable); } #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - void cmdSetDepthWriteEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable) const noexcept { + void cmdSetDepthWriteEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable) const noexcept + { fp_vkCmdSetDepthWriteEnableEXT(commandBuffer, depthWriteEnable); } #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - void cmdSetDepthCompareOpEXT(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp) const noexcept { + void cmdSetDepthCompareOpEXT(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp) const noexcept + { fp_vkCmdSetDepthCompareOpEXT(commandBuffer, depthCompareOp); } #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - void cmdSetDepthBoundsTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable) const noexcept { + void cmdSetDepthBoundsTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable) const noexcept + { fp_vkCmdSetDepthBoundsTestEnableEXT(commandBuffer, depthBoundsTestEnable); } #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - void cmdSetStencilTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable) const noexcept { + void cmdSetStencilTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable) const noexcept + { fp_vkCmdSetStencilTestEnableEXT(commandBuffer, stencilTestEnable); } #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - void cmdSetStencilOpEXT(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, 
VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp) const noexcept { + void cmdSetStencilOpEXT(VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp) const noexcept + { fp_vkCmdSetStencilOpEXT(commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp); } #endif #if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) - void cmdSetRasterizerDiscardEnableEXT(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable) const noexcept { + void cmdSetRasterizerDiscardEnableEXT(VkCommandBuffer commandBuffer, + VkBool32 rasterizerDiscardEnable) const noexcept + { fp_vkCmdSetRasterizerDiscardEnableEXT(commandBuffer, rasterizerDiscardEnable); } #endif #if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) - void cmdSetDepthBiasEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable) const noexcept { + void cmdSetDepthBiasEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable) const noexcept + { fp_vkCmdSetDepthBiasEnableEXT(commandBuffer, depthBiasEnable); } #endif #if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) - void cmdSetPrimitiveRestartEnableEXT(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable) const noexcept { + void cmdSetPrimitiveRestartEnableEXT(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable) const noexcept + { fp_vkCmdSetPrimitiveRestartEnableEXT(commandBuffer, primitiveRestartEnable); } #endif #if (defined(VK_EXT_private_data)) - VkResult createPrivateDataSlotEXT(const VkPrivateDataSlotCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlotEXT* pPrivateDataSlot) const noexcept { + VkResult createPrivateDataSlotEXT(const VkPrivateDataSlotCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPrivateDataSlotEXT* pPrivateDataSlot) const noexcept + { return fp_vkCreatePrivateDataSlotEXT(device, pCreateInfo, pAllocator, pPrivateDataSlot); } #endif #if (defined(VK_EXT_private_data)) - void destroyPrivateDataSlotEXT(VkPrivateDataSlotEXT privateDataSlot, const VkAllocationCallbacks* pAllocator) const noexcept { + void destroyPrivateDataSlotEXT(VkPrivateDataSlotEXT privateDataSlot, + const VkAllocationCallbacks* pAllocator) const noexcept + { fp_vkDestroyPrivateDataSlotEXT(device, privateDataSlot, pAllocator); } #endif #if (defined(VK_EXT_private_data)) - VkResult setPrivateDataEXT(VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t data) const noexcept { + VkResult setPrivateDataEXT(VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlotEXT privateDataSlot, + uint64_t data) const noexcept + { return fp_vkSetPrivateDataEXT(device, objectType, objectHandle, privateDataSlot, data); } #endif #if (defined(VK_EXT_private_data)) - void getPrivateDataEXT(VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t* pData) const noexcept { + void getPrivateDataEXT(VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlotEXT privateDataSlot, + uint64_t* pData) const noexcept + { fp_vkGetPrivateDataEXT(device, objectType, objectHandle, privateDataSlot, pData); } #endif #if (defined(VK_KHR_copy_commands2)) - void cmdCopyBuffer2KHR(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR* pCopyBufferInfo) const noexcept { + void cmdCopyBuffer2KHR(VkCommandBuffer commandBuffer, const 
VkCopyBufferInfo2KHR* pCopyBufferInfo) const noexcept + { fp_vkCmdCopyBuffer2KHR(commandBuffer, pCopyBufferInfo); } #endif #if (defined(VK_KHR_copy_commands2)) - void cmdCopyImage2KHR(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR* pCopyImageInfo) const noexcept { + void cmdCopyImage2KHR(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR* pCopyImageInfo) const noexcept + { fp_vkCmdCopyImage2KHR(commandBuffer, pCopyImageInfo); } #endif #if (defined(VK_KHR_copy_commands2)) - void cmdBlitImage2KHR(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR* pBlitImageInfo) const noexcept { + void cmdBlitImage2KHR(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR* pBlitImageInfo) const noexcept + { fp_vkCmdBlitImage2KHR(commandBuffer, pBlitImageInfo); } #endif #if (defined(VK_KHR_copy_commands2)) - void cmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo) const noexcept { + void cmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer, + const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo) const noexcept + { fp_vkCmdCopyBufferToImage2KHR(commandBuffer, pCopyBufferToImageInfo); } #endif #if (defined(VK_KHR_copy_commands2)) - void cmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo) const noexcept { + void cmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer, + const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo) const noexcept + { fp_vkCmdCopyImageToBuffer2KHR(commandBuffer, pCopyImageToBufferInfo); } #endif #if (defined(VK_KHR_copy_commands2)) - void cmdResolveImage2KHR(VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR* pResolveImageInfo) const noexcept { + void cmdResolveImage2KHR(VkCommandBuffer commandBuffer, + const VkResolveImageInfo2KHR* pResolveImageInfo) const noexcept + { fp_vkCmdResolveImage2KHR(commandBuffer, pResolveImageInfo); } #endif #if (defined(VK_KHR_synchronization2)) - void cmdSetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfoKHR* pDependencyInfo) const noexcept { + void cmdSetEvent2KHR(VkCommandBuffer commandBuffer, + VkEvent event, + const VkDependencyInfoKHR* pDependencyInfo) const noexcept + { fp_vkCmdSetEvent2KHR(commandBuffer, event, pDependencyInfo); } #endif #if (defined(VK_KHR_synchronization2)) - void cmdResetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2KHR stageMask) const noexcept { + void + cmdResetEvent2KHR(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2KHR stageMask) const noexcept + { fp_vkCmdResetEvent2KHR(commandBuffer, event, stageMask); } #endif #if (defined(VK_KHR_synchronization2)) - void cmdWaitEvents2KHR(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfoKHR* pDependencyInfos) const noexcept { + void cmdWaitEvents2KHR(VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent* pEvents, + const VkDependencyInfoKHR* pDependencyInfos) const noexcept + { fp_vkCmdWaitEvents2KHR(commandBuffer, eventCount, pEvents, pDependencyInfos); } #endif #if (defined(VK_KHR_synchronization2)) - void cmdPipelineBarrier2KHR(VkCommandBuffer commandBuffer, const VkDependencyInfoKHR* pDependencyInfo) const noexcept { + void cmdPipelineBarrier2KHR(VkCommandBuffer commandBuffer, + const VkDependencyInfoKHR* pDependencyInfo) const noexcept + { fp_vkCmdPipelineBarrier2KHR(commandBuffer, pDependencyInfo); } #endif #if (defined(VK_KHR_synchronization2)) - VkResult 
queueSubmit2KHR(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR* pSubmits, VkFence fence) const noexcept { + VkResult + queueSubmit2KHR(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR* pSubmits, VkFence fence) const noexcept + { return fp_vkQueueSubmit2KHR(queue, submitCount, pSubmits, fence); } #endif #if (defined(VK_KHR_synchronization2)) - void cmdWriteTimestamp2KHR(VkCommandBuffer commandBuffer, VkPipelineStageFlags2KHR stage, VkQueryPool queryPool, uint32_t query) const noexcept { + void cmdWriteTimestamp2KHR(VkCommandBuffer commandBuffer, + VkPipelineStageFlags2KHR stage, + VkQueryPool queryPool, + uint32_t query) const noexcept + { fp_vkCmdWriteTimestamp2KHR(commandBuffer, stage, queryPool, query); } #endif #if (defined(VK_KHR_dynamic_rendering)) - void cmdBeginRenderingKHR(VkCommandBuffer commandBuffer, const VkRenderingInfoKHR* pRenderingInfo) const noexcept { + void cmdBeginRenderingKHR(VkCommandBuffer commandBuffer, const VkRenderingInfoKHR* pRenderingInfo) const noexcept + { fp_vkCmdBeginRenderingKHR(commandBuffer, pRenderingInfo); } #endif #if (defined(VK_KHR_dynamic_rendering)) - void cmdEndRenderingKHR(VkCommandBuffer commandBuffer) const noexcept { - fp_vkCmdEndRenderingKHR(commandBuffer); - } + void cmdEndRenderingKHR(VkCommandBuffer commandBuffer) const noexcept { fp_vkCmdEndRenderingKHR(commandBuffer); } #endif #if (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) - void getImageSubresourceLayout2EXT(VkImage image, const VkImageSubresource2EXT* pSubresource, VkSubresourceLayout2EXT* pLayout) const noexcept { + void getImageSubresourceLayout2EXT(VkImage image, + const VkImageSubresource2EXT* pSubresource, + VkSubresourceLayout2EXT* pLayout) const noexcept + { fp_vkGetImageSubresourceLayout2EXT(device, image, pSubresource, pLayout); } #endif - PFN_vkGetDeviceQueue fp_vkGetDeviceQueue = nullptr; - PFN_vkQueueSubmit fp_vkQueueSubmit = nullptr; - PFN_vkQueueWaitIdle fp_vkQueueWaitIdle = nullptr; - PFN_vkDeviceWaitIdle fp_vkDeviceWaitIdle = nullptr; - PFN_vkAllocateMemory fp_vkAllocateMemory = nullptr; - PFN_vkFreeMemory fp_vkFreeMemory = nullptr; - PFN_vkMapMemory fp_vkMapMemory = nullptr; - PFN_vkUnmapMemory fp_vkUnmapMemory = nullptr; - PFN_vkFlushMappedMemoryRanges fp_vkFlushMappedMemoryRanges = nullptr; - PFN_vkInvalidateMappedMemoryRanges fp_vkInvalidateMappedMemoryRanges = nullptr; - PFN_vkGetDeviceMemoryCommitment fp_vkGetDeviceMemoryCommitment = nullptr; - PFN_vkGetBufferMemoryRequirements fp_vkGetBufferMemoryRequirements = nullptr; - PFN_vkBindBufferMemory fp_vkBindBufferMemory = nullptr; - PFN_vkGetImageMemoryRequirements fp_vkGetImageMemoryRequirements = nullptr; - PFN_vkBindImageMemory fp_vkBindImageMemory = nullptr; + PFN_vkGetDeviceQueue fp_vkGetDeviceQueue = nullptr; + PFN_vkQueueSubmit fp_vkQueueSubmit = nullptr; + PFN_vkQueueWaitIdle fp_vkQueueWaitIdle = nullptr; + PFN_vkDeviceWaitIdle fp_vkDeviceWaitIdle = nullptr; + PFN_vkAllocateMemory fp_vkAllocateMemory = nullptr; + PFN_vkFreeMemory fp_vkFreeMemory = nullptr; + PFN_vkMapMemory fp_vkMapMemory = nullptr; + PFN_vkUnmapMemory fp_vkUnmapMemory = nullptr; + PFN_vkFlushMappedMemoryRanges fp_vkFlushMappedMemoryRanges = nullptr; + PFN_vkInvalidateMappedMemoryRanges fp_vkInvalidateMappedMemoryRanges = nullptr; + PFN_vkGetDeviceMemoryCommitment fp_vkGetDeviceMemoryCommitment = nullptr; + PFN_vkGetBufferMemoryRequirements fp_vkGetBufferMemoryRequirements = nullptr; + PFN_vkBindBufferMemory fp_vkBindBufferMemory = nullptr; + PFN_vkGetImageMemoryRequirements 
fp_vkGetImageMemoryRequirements = nullptr; + PFN_vkBindImageMemory fp_vkBindImageMemory = nullptr; PFN_vkGetImageSparseMemoryRequirements fp_vkGetImageSparseMemoryRequirements = nullptr; - PFN_vkQueueBindSparse fp_vkQueueBindSparse = nullptr; - PFN_vkCreateFence fp_vkCreateFence = nullptr; - PFN_vkDestroyFence fp_vkDestroyFence = nullptr; - PFN_vkResetFences fp_vkResetFences = nullptr; - PFN_vkGetFenceStatus fp_vkGetFenceStatus = nullptr; - PFN_vkWaitForFences fp_vkWaitForFences = nullptr; - PFN_vkCreateSemaphore fp_vkCreateSemaphore = nullptr; - PFN_vkDestroySemaphore fp_vkDestroySemaphore = nullptr; - PFN_vkCreateEvent fp_vkCreateEvent = nullptr; - PFN_vkDestroyEvent fp_vkDestroyEvent = nullptr; - PFN_vkGetEventStatus fp_vkGetEventStatus = nullptr; - PFN_vkSetEvent fp_vkSetEvent = nullptr; - PFN_vkResetEvent fp_vkResetEvent = nullptr; - PFN_vkCreateQueryPool fp_vkCreateQueryPool = nullptr; - PFN_vkDestroyQueryPool fp_vkDestroyQueryPool = nullptr; - PFN_vkGetQueryPoolResults fp_vkGetQueryPoolResults = nullptr; + PFN_vkQueueBindSparse fp_vkQueueBindSparse = nullptr; + PFN_vkCreateFence fp_vkCreateFence = nullptr; + PFN_vkDestroyFence fp_vkDestroyFence = nullptr; + PFN_vkResetFences fp_vkResetFences = nullptr; + PFN_vkGetFenceStatus fp_vkGetFenceStatus = nullptr; + PFN_vkWaitForFences fp_vkWaitForFences = nullptr; + PFN_vkCreateSemaphore fp_vkCreateSemaphore = nullptr; + PFN_vkDestroySemaphore fp_vkDestroySemaphore = nullptr; + PFN_vkCreateEvent fp_vkCreateEvent = nullptr; + PFN_vkDestroyEvent fp_vkDestroyEvent = nullptr; + PFN_vkGetEventStatus fp_vkGetEventStatus = nullptr; + PFN_vkSetEvent fp_vkSetEvent = nullptr; + PFN_vkResetEvent fp_vkResetEvent = nullptr; + PFN_vkCreateQueryPool fp_vkCreateQueryPool = nullptr; + PFN_vkDestroyQueryPool fp_vkDestroyQueryPool = nullptr; + PFN_vkGetQueryPoolResults fp_vkGetQueryPoolResults = nullptr; #if (defined(VK_VERSION_1_2)) PFN_vkResetQueryPool fp_vkResetQueryPool = nullptr; #else - void * fp_vkResetQueryPool{}; + void* fp_vkResetQueryPool{}; #endif - PFN_vkCreateBuffer fp_vkCreateBuffer = nullptr; - PFN_vkDestroyBuffer fp_vkDestroyBuffer = nullptr; - PFN_vkCreateBufferView fp_vkCreateBufferView = nullptr; - PFN_vkDestroyBufferView fp_vkDestroyBufferView = nullptr; - PFN_vkCreateImage fp_vkCreateImage = nullptr; - PFN_vkDestroyImage fp_vkDestroyImage = nullptr; + PFN_vkCreateBuffer fp_vkCreateBuffer = nullptr; + PFN_vkDestroyBuffer fp_vkDestroyBuffer = nullptr; + PFN_vkCreateBufferView fp_vkCreateBufferView = nullptr; + PFN_vkDestroyBufferView fp_vkDestroyBufferView = nullptr; + PFN_vkCreateImage fp_vkCreateImage = nullptr; + PFN_vkDestroyImage fp_vkDestroyImage = nullptr; PFN_vkGetImageSubresourceLayout fp_vkGetImageSubresourceLayout = nullptr; - PFN_vkCreateImageView fp_vkCreateImageView = nullptr; - PFN_vkDestroyImageView fp_vkDestroyImageView = nullptr; - PFN_vkCreateShaderModule fp_vkCreateShaderModule = nullptr; - PFN_vkDestroyShaderModule fp_vkDestroyShaderModule = nullptr; - PFN_vkCreatePipelineCache fp_vkCreatePipelineCache = nullptr; - PFN_vkDestroyPipelineCache fp_vkDestroyPipelineCache = nullptr; - PFN_vkGetPipelineCacheData fp_vkGetPipelineCacheData = nullptr; - PFN_vkMergePipelineCaches fp_vkMergePipelineCaches = nullptr; + PFN_vkCreateImageView fp_vkCreateImageView = nullptr; + PFN_vkDestroyImageView fp_vkDestroyImageView = nullptr; + PFN_vkCreateShaderModule fp_vkCreateShaderModule = nullptr; + PFN_vkDestroyShaderModule fp_vkDestroyShaderModule = nullptr; + PFN_vkCreatePipelineCache fp_vkCreatePipelineCache = nullptr; + 
PFN_vkDestroyPipelineCache fp_vkDestroyPipelineCache = nullptr; + PFN_vkGetPipelineCacheData fp_vkGetPipelineCacheData = nullptr; + PFN_vkMergePipelineCaches fp_vkMergePipelineCaches = nullptr; #if (defined(VK_KHR_pipeline_binary)) PFN_vkCreatePipelineBinariesKHR fp_vkCreatePipelineBinariesKHR = nullptr; #else - void * fp_vkCreatePipelineBinariesKHR{}; + void* fp_vkCreatePipelineBinariesKHR{}; #endif #if (defined(VK_KHR_pipeline_binary)) PFN_vkDestroyPipelineBinaryKHR fp_vkDestroyPipelineBinaryKHR = nullptr; #else - void * fp_vkDestroyPipelineBinaryKHR{}; + void* fp_vkDestroyPipelineBinaryKHR{}; #endif #if (defined(VK_KHR_pipeline_binary)) PFN_vkGetPipelineKeyKHR fp_vkGetPipelineKeyKHR = nullptr; #else - void * fp_vkGetPipelineKeyKHR{}; + void* fp_vkGetPipelineKeyKHR{}; #endif #if (defined(VK_KHR_pipeline_binary)) PFN_vkGetPipelineBinaryDataKHR fp_vkGetPipelineBinaryDataKHR = nullptr; #else - void * fp_vkGetPipelineBinaryDataKHR{}; + void* fp_vkGetPipelineBinaryDataKHR{}; #endif #if (defined(VK_KHR_pipeline_binary)) PFN_vkReleaseCapturedPipelineDataKHR fp_vkReleaseCapturedPipelineDataKHR = nullptr; #else - void * fp_vkReleaseCapturedPipelineDataKHR{}; + void* fp_vkReleaseCapturedPipelineDataKHR{}; #endif PFN_vkCreateGraphicsPipelines fp_vkCreateGraphicsPipelines = nullptr; - PFN_vkCreateComputePipelines fp_vkCreateComputePipelines = nullptr; + PFN_vkCreateComputePipelines fp_vkCreateComputePipelines = nullptr; #if (defined(VK_HUAWEI_subpass_shading)) PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI fp_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = nullptr; #else - void * fp_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI{}; + void* fp_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI{}; #endif - PFN_vkDestroyPipeline fp_vkDestroyPipeline = nullptr; - PFN_vkCreatePipelineLayout fp_vkCreatePipelineLayout = nullptr; - PFN_vkDestroyPipelineLayout fp_vkDestroyPipelineLayout = nullptr; - PFN_vkCreateSampler fp_vkCreateSampler = nullptr; - PFN_vkDestroySampler fp_vkDestroySampler = nullptr; - PFN_vkCreateDescriptorSetLayout fp_vkCreateDescriptorSetLayout = nullptr; + PFN_vkDestroyPipeline fp_vkDestroyPipeline = nullptr; + PFN_vkCreatePipelineLayout fp_vkCreatePipelineLayout = nullptr; + PFN_vkDestroyPipelineLayout fp_vkDestroyPipelineLayout = nullptr; + PFN_vkCreateSampler fp_vkCreateSampler = nullptr; + PFN_vkDestroySampler fp_vkDestroySampler = nullptr; + PFN_vkCreateDescriptorSetLayout fp_vkCreateDescriptorSetLayout = nullptr; PFN_vkDestroyDescriptorSetLayout fp_vkDestroyDescriptorSetLayout = nullptr; - PFN_vkCreateDescriptorPool fp_vkCreateDescriptorPool = nullptr; - PFN_vkDestroyDescriptorPool fp_vkDestroyDescriptorPool = nullptr; - PFN_vkResetDescriptorPool fp_vkResetDescriptorPool = nullptr; - PFN_vkAllocateDescriptorSets fp_vkAllocateDescriptorSets = nullptr; - PFN_vkFreeDescriptorSets fp_vkFreeDescriptorSets = nullptr; - PFN_vkUpdateDescriptorSets fp_vkUpdateDescriptorSets = nullptr; - PFN_vkCreateFramebuffer fp_vkCreateFramebuffer = nullptr; - PFN_vkDestroyFramebuffer fp_vkDestroyFramebuffer = nullptr; - PFN_vkCreateRenderPass fp_vkCreateRenderPass = nullptr; - PFN_vkDestroyRenderPass fp_vkDestroyRenderPass = nullptr; - PFN_vkGetRenderAreaGranularity fp_vkGetRenderAreaGranularity = nullptr; + PFN_vkCreateDescriptorPool fp_vkCreateDescriptorPool = nullptr; + PFN_vkDestroyDescriptorPool fp_vkDestroyDescriptorPool = nullptr; + PFN_vkResetDescriptorPool fp_vkResetDescriptorPool = nullptr; + PFN_vkAllocateDescriptorSets fp_vkAllocateDescriptorSets = nullptr; + PFN_vkFreeDescriptorSets 
fp_vkFreeDescriptorSets = nullptr; + PFN_vkUpdateDescriptorSets fp_vkUpdateDescriptorSets = nullptr; + PFN_vkCreateFramebuffer fp_vkCreateFramebuffer = nullptr; + PFN_vkDestroyFramebuffer fp_vkDestroyFramebuffer = nullptr; + PFN_vkCreateRenderPass fp_vkCreateRenderPass = nullptr; + PFN_vkDestroyRenderPass fp_vkDestroyRenderPass = nullptr; + PFN_vkGetRenderAreaGranularity fp_vkGetRenderAreaGranularity = nullptr; #if (defined(VK_KHR_maintenance5)) PFN_vkGetRenderingAreaGranularityKHR fp_vkGetRenderingAreaGranularityKHR = nullptr; #else - void * fp_vkGetRenderingAreaGranularityKHR{}; + void* fp_vkGetRenderingAreaGranularityKHR{}; #endif - PFN_vkCreateCommandPool fp_vkCreateCommandPool = nullptr; - PFN_vkDestroyCommandPool fp_vkDestroyCommandPool = nullptr; - PFN_vkResetCommandPool fp_vkResetCommandPool = nullptr; + PFN_vkCreateCommandPool fp_vkCreateCommandPool = nullptr; + PFN_vkDestroyCommandPool fp_vkDestroyCommandPool = nullptr; + PFN_vkResetCommandPool fp_vkResetCommandPool = nullptr; PFN_vkAllocateCommandBuffers fp_vkAllocateCommandBuffers = nullptr; - PFN_vkFreeCommandBuffers fp_vkFreeCommandBuffers = nullptr; - PFN_vkBeginCommandBuffer fp_vkBeginCommandBuffer = nullptr; - PFN_vkEndCommandBuffer fp_vkEndCommandBuffer = nullptr; - PFN_vkResetCommandBuffer fp_vkResetCommandBuffer = nullptr; - PFN_vkCmdBindPipeline fp_vkCmdBindPipeline = nullptr; + PFN_vkFreeCommandBuffers fp_vkFreeCommandBuffers = nullptr; + PFN_vkBeginCommandBuffer fp_vkBeginCommandBuffer = nullptr; + PFN_vkEndCommandBuffer fp_vkEndCommandBuffer = nullptr; + PFN_vkResetCommandBuffer fp_vkResetCommandBuffer = nullptr; + PFN_vkCmdBindPipeline fp_vkCmdBindPipeline = nullptr; #if (defined(VK_EXT_attachment_feedback_loop_dynamic_state)) PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT fp_vkCmdSetAttachmentFeedbackLoopEnableEXT = nullptr; #else - void * fp_vkCmdSetAttachmentFeedbackLoopEnableEXT{}; + void* fp_vkCmdSetAttachmentFeedbackLoopEnableEXT{}; #endif - PFN_vkCmdSetViewport fp_vkCmdSetViewport = nullptr; - PFN_vkCmdSetScissor fp_vkCmdSetScissor = nullptr; - PFN_vkCmdSetLineWidth fp_vkCmdSetLineWidth = nullptr; - PFN_vkCmdSetDepthBias fp_vkCmdSetDepthBias = nullptr; - PFN_vkCmdSetBlendConstants fp_vkCmdSetBlendConstants = nullptr; - PFN_vkCmdSetDepthBounds fp_vkCmdSetDepthBounds = nullptr; + PFN_vkCmdSetViewport fp_vkCmdSetViewport = nullptr; + PFN_vkCmdSetScissor fp_vkCmdSetScissor = nullptr; + PFN_vkCmdSetLineWidth fp_vkCmdSetLineWidth = nullptr; + PFN_vkCmdSetDepthBias fp_vkCmdSetDepthBias = nullptr; + PFN_vkCmdSetBlendConstants fp_vkCmdSetBlendConstants = nullptr; + PFN_vkCmdSetDepthBounds fp_vkCmdSetDepthBounds = nullptr; PFN_vkCmdSetStencilCompareMask fp_vkCmdSetStencilCompareMask = nullptr; - PFN_vkCmdSetStencilWriteMask fp_vkCmdSetStencilWriteMask = nullptr; - PFN_vkCmdSetStencilReference fp_vkCmdSetStencilReference = nullptr; - PFN_vkCmdBindDescriptorSets fp_vkCmdBindDescriptorSets = nullptr; - PFN_vkCmdBindIndexBuffer fp_vkCmdBindIndexBuffer = nullptr; - PFN_vkCmdBindVertexBuffers fp_vkCmdBindVertexBuffers = nullptr; - PFN_vkCmdDraw fp_vkCmdDraw = nullptr; - PFN_vkCmdDrawIndexed fp_vkCmdDrawIndexed = nullptr; + PFN_vkCmdSetStencilWriteMask fp_vkCmdSetStencilWriteMask = nullptr; + PFN_vkCmdSetStencilReference fp_vkCmdSetStencilReference = nullptr; + PFN_vkCmdBindDescriptorSets fp_vkCmdBindDescriptorSets = nullptr; + PFN_vkCmdBindIndexBuffer fp_vkCmdBindIndexBuffer = nullptr; + PFN_vkCmdBindVertexBuffers fp_vkCmdBindVertexBuffers = nullptr; + PFN_vkCmdDraw fp_vkCmdDraw = nullptr; + PFN_vkCmdDrawIndexed 
fp_vkCmdDrawIndexed = nullptr; #if (defined(VK_EXT_multi_draw)) PFN_vkCmdDrawMultiEXT fp_vkCmdDrawMultiEXT = nullptr; #else - void * fp_vkCmdDrawMultiEXT{}; + void* fp_vkCmdDrawMultiEXT{}; #endif #if (defined(VK_EXT_multi_draw)) PFN_vkCmdDrawMultiIndexedEXT fp_vkCmdDrawMultiIndexedEXT = nullptr; #else - void * fp_vkCmdDrawMultiIndexedEXT{}; + void* fp_vkCmdDrawMultiIndexedEXT{}; #endif - PFN_vkCmdDrawIndirect fp_vkCmdDrawIndirect = nullptr; + PFN_vkCmdDrawIndirect fp_vkCmdDrawIndirect = nullptr; PFN_vkCmdDrawIndexedIndirect fp_vkCmdDrawIndexedIndirect = nullptr; - PFN_vkCmdDispatch fp_vkCmdDispatch = nullptr; - PFN_vkCmdDispatchIndirect fp_vkCmdDispatchIndirect = nullptr; + PFN_vkCmdDispatch fp_vkCmdDispatch = nullptr; + PFN_vkCmdDispatchIndirect fp_vkCmdDispatchIndirect = nullptr; #if (defined(VK_HUAWEI_subpass_shading)) PFN_vkCmdSubpassShadingHUAWEI fp_vkCmdSubpassShadingHUAWEI = nullptr; #else - void * fp_vkCmdSubpassShadingHUAWEI{}; + void* fp_vkCmdSubpassShadingHUAWEI{}; #endif #if (defined(VK_HUAWEI_cluster_culling_shader)) PFN_vkCmdDrawClusterHUAWEI fp_vkCmdDrawClusterHUAWEI = nullptr; #else - void * fp_vkCmdDrawClusterHUAWEI{}; + void* fp_vkCmdDrawClusterHUAWEI{}; #endif #if (defined(VK_HUAWEI_cluster_culling_shader)) PFN_vkCmdDrawClusterIndirectHUAWEI fp_vkCmdDrawClusterIndirectHUAWEI = nullptr; #else - void * fp_vkCmdDrawClusterIndirectHUAWEI{}; + void* fp_vkCmdDrawClusterIndirectHUAWEI{}; #endif #if (defined(VK_NV_device_generated_commands_compute)) PFN_vkCmdUpdatePipelineIndirectBufferNV fp_vkCmdUpdatePipelineIndirectBufferNV = nullptr; #else - void * fp_vkCmdUpdatePipelineIndirectBufferNV{}; + void* fp_vkCmdUpdatePipelineIndirectBufferNV{}; #endif - PFN_vkCmdCopyBuffer fp_vkCmdCopyBuffer = nullptr; - PFN_vkCmdCopyImage fp_vkCmdCopyImage = nullptr; - PFN_vkCmdBlitImage fp_vkCmdBlitImage = nullptr; + PFN_vkCmdCopyBuffer fp_vkCmdCopyBuffer = nullptr; + PFN_vkCmdCopyImage fp_vkCmdCopyImage = nullptr; + PFN_vkCmdBlitImage fp_vkCmdBlitImage = nullptr; PFN_vkCmdCopyBufferToImage fp_vkCmdCopyBufferToImage = nullptr; PFN_vkCmdCopyImageToBuffer fp_vkCmdCopyImageToBuffer = nullptr; #if (defined(VK_NV_copy_memory_indirect)) PFN_vkCmdCopyMemoryIndirectNV fp_vkCmdCopyMemoryIndirectNV = nullptr; #else - void * fp_vkCmdCopyMemoryIndirectNV{}; + void* fp_vkCmdCopyMemoryIndirectNV{}; #endif #if (defined(VK_NV_copy_memory_indirect)) PFN_vkCmdCopyMemoryToImageIndirectNV fp_vkCmdCopyMemoryToImageIndirectNV = nullptr; #else - void * fp_vkCmdCopyMemoryToImageIndirectNV{}; + void* fp_vkCmdCopyMemoryToImageIndirectNV{}; #endif - PFN_vkCmdUpdateBuffer fp_vkCmdUpdateBuffer = nullptr; - PFN_vkCmdFillBuffer fp_vkCmdFillBuffer = nullptr; - PFN_vkCmdClearColorImage fp_vkCmdClearColorImage = nullptr; + PFN_vkCmdUpdateBuffer fp_vkCmdUpdateBuffer = nullptr; + PFN_vkCmdFillBuffer fp_vkCmdFillBuffer = nullptr; + PFN_vkCmdClearColorImage fp_vkCmdClearColorImage = nullptr; PFN_vkCmdClearDepthStencilImage fp_vkCmdClearDepthStencilImage = nullptr; - PFN_vkCmdClearAttachments fp_vkCmdClearAttachments = nullptr; - PFN_vkCmdResolveImage fp_vkCmdResolveImage = nullptr; - PFN_vkCmdSetEvent fp_vkCmdSetEvent = nullptr; - PFN_vkCmdResetEvent fp_vkCmdResetEvent = nullptr; - PFN_vkCmdWaitEvents fp_vkCmdWaitEvents = nullptr; - PFN_vkCmdPipelineBarrier fp_vkCmdPipelineBarrier = nullptr; - PFN_vkCmdBeginQuery fp_vkCmdBeginQuery = nullptr; - PFN_vkCmdEndQuery fp_vkCmdEndQuery = nullptr; + PFN_vkCmdClearAttachments fp_vkCmdClearAttachments = nullptr; + PFN_vkCmdResolveImage fp_vkCmdResolveImage = nullptr; + PFN_vkCmdSetEvent 
fp_vkCmdSetEvent = nullptr; + PFN_vkCmdResetEvent fp_vkCmdResetEvent = nullptr; + PFN_vkCmdWaitEvents fp_vkCmdWaitEvents = nullptr; + PFN_vkCmdPipelineBarrier fp_vkCmdPipelineBarrier = nullptr; + PFN_vkCmdBeginQuery fp_vkCmdBeginQuery = nullptr; + PFN_vkCmdEndQuery fp_vkCmdEndQuery = nullptr; #if (defined(VK_EXT_conditional_rendering)) PFN_vkCmdBeginConditionalRenderingEXT fp_vkCmdBeginConditionalRenderingEXT = nullptr; #else - void * fp_vkCmdBeginConditionalRenderingEXT{}; + void* fp_vkCmdBeginConditionalRenderingEXT{}; #endif #if (defined(VK_EXT_conditional_rendering)) PFN_vkCmdEndConditionalRenderingEXT fp_vkCmdEndConditionalRenderingEXT = nullptr; #else - void * fp_vkCmdEndConditionalRenderingEXT{}; + void* fp_vkCmdEndConditionalRenderingEXT{}; #endif - PFN_vkCmdResetQueryPool fp_vkCmdResetQueryPool = nullptr; - PFN_vkCmdWriteTimestamp fp_vkCmdWriteTimestamp = nullptr; + PFN_vkCmdResetQueryPool fp_vkCmdResetQueryPool = nullptr; + PFN_vkCmdWriteTimestamp fp_vkCmdWriteTimestamp = nullptr; PFN_vkCmdCopyQueryPoolResults fp_vkCmdCopyQueryPoolResults = nullptr; - PFN_vkCmdPushConstants fp_vkCmdPushConstants = nullptr; - PFN_vkCmdBeginRenderPass fp_vkCmdBeginRenderPass = nullptr; - PFN_vkCmdNextSubpass fp_vkCmdNextSubpass = nullptr; - PFN_vkCmdEndRenderPass fp_vkCmdEndRenderPass = nullptr; - PFN_vkCmdExecuteCommands fp_vkCmdExecuteCommands = nullptr; + PFN_vkCmdPushConstants fp_vkCmdPushConstants = nullptr; + PFN_vkCmdBeginRenderPass fp_vkCmdBeginRenderPass = nullptr; + PFN_vkCmdNextSubpass fp_vkCmdNextSubpass = nullptr; + PFN_vkCmdEndRenderPass fp_vkCmdEndRenderPass = nullptr; + PFN_vkCmdExecuteCommands fp_vkCmdExecuteCommands = nullptr; #if (defined(VK_KHR_display_swapchain)) PFN_vkCreateSharedSwapchainsKHR fp_vkCreateSharedSwapchainsKHR = nullptr; #else - void * fp_vkCreateSharedSwapchainsKHR{}; + void* fp_vkCreateSharedSwapchainsKHR{}; #endif #if (defined(VK_KHR_swapchain)) PFN_vkCreateSwapchainKHR fp_vkCreateSwapchainKHR = nullptr; #else - void * fp_vkCreateSwapchainKHR{}; + void* fp_vkCreateSwapchainKHR{}; #endif #if (defined(VK_KHR_swapchain)) PFN_vkDestroySwapchainKHR fp_vkDestroySwapchainKHR = nullptr; #else - void * fp_vkDestroySwapchainKHR{}; + void* fp_vkDestroySwapchainKHR{}; #endif #if (defined(VK_KHR_swapchain)) PFN_vkGetSwapchainImagesKHR fp_vkGetSwapchainImagesKHR = nullptr; #else - void * fp_vkGetSwapchainImagesKHR{}; + void* fp_vkGetSwapchainImagesKHR{}; #endif #if (defined(VK_KHR_swapchain)) PFN_vkAcquireNextImageKHR fp_vkAcquireNextImageKHR = nullptr; #else - void * fp_vkAcquireNextImageKHR{}; + void* fp_vkAcquireNextImageKHR{}; #endif #if (defined(VK_KHR_swapchain)) PFN_vkQueuePresentKHR fp_vkQueuePresentKHR = nullptr; #else - void * fp_vkQueuePresentKHR{}; + void* fp_vkQueuePresentKHR{}; #endif #if (defined(VK_EXT_debug_marker)) PFN_vkDebugMarkerSetObjectNameEXT fp_vkDebugMarkerSetObjectNameEXT = nullptr; #else - void * fp_vkDebugMarkerSetObjectNameEXT{}; + void* fp_vkDebugMarkerSetObjectNameEXT{}; #endif #if (defined(VK_EXT_debug_marker)) PFN_vkDebugMarkerSetObjectTagEXT fp_vkDebugMarkerSetObjectTagEXT = nullptr; #else - void * fp_vkDebugMarkerSetObjectTagEXT{}; + void* fp_vkDebugMarkerSetObjectTagEXT{}; #endif #if (defined(VK_EXT_debug_marker)) PFN_vkCmdDebugMarkerBeginEXT fp_vkCmdDebugMarkerBeginEXT = nullptr; #else - void * fp_vkCmdDebugMarkerBeginEXT{}; + void* fp_vkCmdDebugMarkerBeginEXT{}; #endif #if (defined(VK_EXT_debug_marker)) PFN_vkCmdDebugMarkerEndEXT fp_vkCmdDebugMarkerEndEXT = nullptr; #else - void * fp_vkCmdDebugMarkerEndEXT{}; + void* 
fp_vkCmdDebugMarkerEndEXT{}; #endif #if (defined(VK_EXT_debug_marker)) PFN_vkCmdDebugMarkerInsertEXT fp_vkCmdDebugMarkerInsertEXT = nullptr; #else - void * fp_vkCmdDebugMarkerInsertEXT{}; + void* fp_vkCmdDebugMarkerInsertEXT{}; #endif #if (defined(VK_NV_external_memory_win32)) PFN_vkGetMemoryWin32HandleNV fp_vkGetMemoryWin32HandleNV = nullptr; #else - void * fp_vkGetMemoryWin32HandleNV{}; + void* fp_vkGetMemoryWin32HandleNV{}; #endif #if (defined(VK_NV_device_generated_commands)) PFN_vkCmdExecuteGeneratedCommandsNV fp_vkCmdExecuteGeneratedCommandsNV = nullptr; #else - void * fp_vkCmdExecuteGeneratedCommandsNV{}; + void* fp_vkCmdExecuteGeneratedCommandsNV{}; #endif #if (defined(VK_NV_device_generated_commands)) PFN_vkCmdPreprocessGeneratedCommandsNV fp_vkCmdPreprocessGeneratedCommandsNV = nullptr; #else - void * fp_vkCmdPreprocessGeneratedCommandsNV{}; + void* fp_vkCmdPreprocessGeneratedCommandsNV{}; #endif #if (defined(VK_NV_device_generated_commands)) PFN_vkCmdBindPipelineShaderGroupNV fp_vkCmdBindPipelineShaderGroupNV = nullptr; #else - void * fp_vkCmdBindPipelineShaderGroupNV{}; + void* fp_vkCmdBindPipelineShaderGroupNV{}; #endif #if (defined(VK_NV_device_generated_commands)) PFN_vkGetGeneratedCommandsMemoryRequirementsNV fp_vkGetGeneratedCommandsMemoryRequirementsNV = nullptr; #else - void * fp_vkGetGeneratedCommandsMemoryRequirementsNV{}; + void* fp_vkGetGeneratedCommandsMemoryRequirementsNV{}; #endif #if (defined(VK_NV_device_generated_commands)) PFN_vkCreateIndirectCommandsLayoutNV fp_vkCreateIndirectCommandsLayoutNV = nullptr; #else - void * fp_vkCreateIndirectCommandsLayoutNV{}; + void* fp_vkCreateIndirectCommandsLayoutNV{}; #endif #if (defined(VK_NV_device_generated_commands)) PFN_vkDestroyIndirectCommandsLayoutNV fp_vkDestroyIndirectCommandsLayoutNV = nullptr; #else - void * fp_vkDestroyIndirectCommandsLayoutNV{}; + void* fp_vkDestroyIndirectCommandsLayoutNV{}; #endif #if (defined(VK_EXT_device_generated_commands)) PFN_vkCmdExecuteGeneratedCommandsEXT fp_vkCmdExecuteGeneratedCommandsEXT = nullptr; #else - void * fp_vkCmdExecuteGeneratedCommandsEXT{}; + void* fp_vkCmdExecuteGeneratedCommandsEXT{}; #endif #if (defined(VK_EXT_device_generated_commands)) PFN_vkCmdPreprocessGeneratedCommandsEXT fp_vkCmdPreprocessGeneratedCommandsEXT = nullptr; #else - void * fp_vkCmdPreprocessGeneratedCommandsEXT{}; + void* fp_vkCmdPreprocessGeneratedCommandsEXT{}; #endif #if (defined(VK_EXT_device_generated_commands)) PFN_vkGetGeneratedCommandsMemoryRequirementsEXT fp_vkGetGeneratedCommandsMemoryRequirementsEXT = nullptr; #else - void * fp_vkGetGeneratedCommandsMemoryRequirementsEXT{}; + void* fp_vkGetGeneratedCommandsMemoryRequirementsEXT{}; #endif #if (defined(VK_EXT_device_generated_commands)) PFN_vkCreateIndirectCommandsLayoutEXT fp_vkCreateIndirectCommandsLayoutEXT = nullptr; #else - void * fp_vkCreateIndirectCommandsLayoutEXT{}; + void* fp_vkCreateIndirectCommandsLayoutEXT{}; #endif #if (defined(VK_EXT_device_generated_commands)) PFN_vkDestroyIndirectCommandsLayoutEXT fp_vkDestroyIndirectCommandsLayoutEXT = nullptr; #else - void * fp_vkDestroyIndirectCommandsLayoutEXT{}; + void* fp_vkDestroyIndirectCommandsLayoutEXT{}; #endif #if (defined(VK_EXT_device_generated_commands)) PFN_vkCreateIndirectExecutionSetEXT fp_vkCreateIndirectExecutionSetEXT = nullptr; #else - void * fp_vkCreateIndirectExecutionSetEXT{}; + void* fp_vkCreateIndirectExecutionSetEXT{}; #endif #if (defined(VK_EXT_device_generated_commands)) PFN_vkDestroyIndirectExecutionSetEXT fp_vkDestroyIndirectExecutionSetEXT = nullptr; #else 
- void * fp_vkDestroyIndirectExecutionSetEXT{}; + void* fp_vkDestroyIndirectExecutionSetEXT{}; #endif #if (defined(VK_EXT_device_generated_commands)) PFN_vkUpdateIndirectExecutionSetPipelineEXT fp_vkUpdateIndirectExecutionSetPipelineEXT = nullptr; #else - void * fp_vkUpdateIndirectExecutionSetPipelineEXT{}; + void* fp_vkUpdateIndirectExecutionSetPipelineEXT{}; #endif #if (defined(VK_EXT_device_generated_commands)) PFN_vkUpdateIndirectExecutionSetShaderEXT fp_vkUpdateIndirectExecutionSetShaderEXT = nullptr; #else - void * fp_vkUpdateIndirectExecutionSetShaderEXT{}; + void* fp_vkUpdateIndirectExecutionSetShaderEXT{}; #endif #if (defined(VK_KHR_push_descriptor)) PFN_vkCmdPushDescriptorSetKHR fp_vkCmdPushDescriptorSetKHR = nullptr; #else - void * fp_vkCmdPushDescriptorSetKHR{}; + void* fp_vkCmdPushDescriptorSetKHR{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkTrimCommandPool fp_vkTrimCommandPool = nullptr; #else - void * fp_vkTrimCommandPool{}; + void* fp_vkTrimCommandPool{}; #endif #if (defined(VK_KHR_external_memory_win32)) PFN_vkGetMemoryWin32HandleKHR fp_vkGetMemoryWin32HandleKHR = nullptr; #else - void * fp_vkGetMemoryWin32HandleKHR{}; + void* fp_vkGetMemoryWin32HandleKHR{}; #endif #if (defined(VK_KHR_external_memory_win32)) PFN_vkGetMemoryWin32HandlePropertiesKHR fp_vkGetMemoryWin32HandlePropertiesKHR = nullptr; #else - void * fp_vkGetMemoryWin32HandlePropertiesKHR{}; + void* fp_vkGetMemoryWin32HandlePropertiesKHR{}; #endif #if (defined(VK_KHR_external_memory_fd)) PFN_vkGetMemoryFdKHR fp_vkGetMemoryFdKHR = nullptr; #else - void * fp_vkGetMemoryFdKHR{}; + void* fp_vkGetMemoryFdKHR{}; #endif #if (defined(VK_KHR_external_memory_fd)) PFN_vkGetMemoryFdPropertiesKHR fp_vkGetMemoryFdPropertiesKHR = nullptr; #else - void * fp_vkGetMemoryFdPropertiesKHR{}; + void* fp_vkGetMemoryFdPropertiesKHR{}; #endif #if (defined(VK_FUCHSIA_external_memory)) PFN_vkGetMemoryZirconHandleFUCHSIA fp_vkGetMemoryZirconHandleFUCHSIA = nullptr; #else - void * fp_vkGetMemoryZirconHandleFUCHSIA{}; + void* fp_vkGetMemoryZirconHandleFUCHSIA{}; #endif #if (defined(VK_FUCHSIA_external_memory)) PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA fp_vkGetMemoryZirconHandlePropertiesFUCHSIA = nullptr; #else - void * fp_vkGetMemoryZirconHandlePropertiesFUCHSIA{}; + void* fp_vkGetMemoryZirconHandlePropertiesFUCHSIA{}; #endif #if (defined(VK_NV_external_memory_rdma)) PFN_vkGetMemoryRemoteAddressNV fp_vkGetMemoryRemoteAddressNV = nullptr; #else - void * fp_vkGetMemoryRemoteAddressNV{}; + void* fp_vkGetMemoryRemoteAddressNV{}; #endif #if (defined(VK_NV_external_memory_sci_buf)) PFN_vkGetMemorySciBufNV fp_vkGetMemorySciBufNV = nullptr; #else - void * fp_vkGetMemorySciBufNV{}; + void* fp_vkGetMemorySciBufNV{}; #endif #if (defined(VK_KHR_external_semaphore_win32)) PFN_vkGetSemaphoreWin32HandleKHR fp_vkGetSemaphoreWin32HandleKHR = nullptr; #else - void * fp_vkGetSemaphoreWin32HandleKHR{}; + void* fp_vkGetSemaphoreWin32HandleKHR{}; #endif #if (defined(VK_KHR_external_semaphore_win32)) PFN_vkImportSemaphoreWin32HandleKHR fp_vkImportSemaphoreWin32HandleKHR = nullptr; #else - void * fp_vkImportSemaphoreWin32HandleKHR{}; + void* fp_vkImportSemaphoreWin32HandleKHR{}; #endif #if (defined(VK_KHR_external_semaphore_fd)) PFN_vkGetSemaphoreFdKHR fp_vkGetSemaphoreFdKHR = nullptr; #else - void * fp_vkGetSemaphoreFdKHR{}; + void* fp_vkGetSemaphoreFdKHR{}; #endif #if (defined(VK_KHR_external_semaphore_fd)) PFN_vkImportSemaphoreFdKHR fp_vkImportSemaphoreFdKHR = nullptr; #else - void * fp_vkImportSemaphoreFdKHR{}; + void* fp_vkImportSemaphoreFdKHR{}; #endif 
#if (defined(VK_FUCHSIA_external_semaphore)) PFN_vkGetSemaphoreZirconHandleFUCHSIA fp_vkGetSemaphoreZirconHandleFUCHSIA = nullptr; #else - void * fp_vkGetSemaphoreZirconHandleFUCHSIA{}; + void* fp_vkGetSemaphoreZirconHandleFUCHSIA{}; #endif #if (defined(VK_FUCHSIA_external_semaphore)) PFN_vkImportSemaphoreZirconHandleFUCHSIA fp_vkImportSemaphoreZirconHandleFUCHSIA = nullptr; #else - void * fp_vkImportSemaphoreZirconHandleFUCHSIA{}; + void* fp_vkImportSemaphoreZirconHandleFUCHSIA{}; #endif #if (defined(VK_KHR_external_fence_win32)) PFN_vkGetFenceWin32HandleKHR fp_vkGetFenceWin32HandleKHR = nullptr; #else - void * fp_vkGetFenceWin32HandleKHR{}; + void* fp_vkGetFenceWin32HandleKHR{}; #endif #if (defined(VK_KHR_external_fence_win32)) PFN_vkImportFenceWin32HandleKHR fp_vkImportFenceWin32HandleKHR = nullptr; #else - void * fp_vkImportFenceWin32HandleKHR{}; + void* fp_vkImportFenceWin32HandleKHR{}; #endif #if (defined(VK_KHR_external_fence_fd)) PFN_vkGetFenceFdKHR fp_vkGetFenceFdKHR = nullptr; #else - void * fp_vkGetFenceFdKHR{}; + void* fp_vkGetFenceFdKHR{}; #endif #if (defined(VK_KHR_external_fence_fd)) PFN_vkImportFenceFdKHR fp_vkImportFenceFdKHR = nullptr; #else - void * fp_vkImportFenceFdKHR{}; + void* fp_vkImportFenceFdKHR{}; #endif #if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) PFN_vkGetFenceSciSyncFenceNV fp_vkGetFenceSciSyncFenceNV = nullptr; #else - void * fp_vkGetFenceSciSyncFenceNV{}; + void* fp_vkGetFenceSciSyncFenceNV{}; #endif #if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) PFN_vkGetFenceSciSyncObjNV fp_vkGetFenceSciSyncObjNV = nullptr; #else - void * fp_vkGetFenceSciSyncObjNV{}; + void* fp_vkGetFenceSciSyncObjNV{}; #endif #if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) PFN_vkImportFenceSciSyncFenceNV fp_vkImportFenceSciSyncFenceNV = nullptr; #else - void * fp_vkImportFenceSciSyncFenceNV{}; + void* fp_vkImportFenceSciSyncFenceNV{}; #endif #if (defined(VK_NV_external_sci_sync)) || (defined(VK_NV_external_sci_sync2)) PFN_vkImportFenceSciSyncObjNV fp_vkImportFenceSciSyncObjNV = nullptr; #else - void * fp_vkImportFenceSciSyncObjNV{}; + void* fp_vkImportFenceSciSyncObjNV{}; #endif #if (defined(VK_NV_external_sci_sync)) PFN_vkGetSemaphoreSciSyncObjNV fp_vkGetSemaphoreSciSyncObjNV = nullptr; #else - void * fp_vkGetSemaphoreSciSyncObjNV{}; + void* fp_vkGetSemaphoreSciSyncObjNV{}; #endif #if (defined(VK_NV_external_sci_sync)) PFN_vkImportSemaphoreSciSyncObjNV fp_vkImportSemaphoreSciSyncObjNV = nullptr; #else - void * fp_vkImportSemaphoreSciSyncObjNV{}; + void* fp_vkImportSemaphoreSciSyncObjNV{}; #endif #if (defined(VK_NV_external_sci_sync2)) PFN_vkCreateSemaphoreSciSyncPoolNV fp_vkCreateSemaphoreSciSyncPoolNV = nullptr; #else - void * fp_vkCreateSemaphoreSciSyncPoolNV{}; + void* fp_vkCreateSemaphoreSciSyncPoolNV{}; #endif #if (defined(VK_NV_external_sci_sync2)) PFN_vkDestroySemaphoreSciSyncPoolNV fp_vkDestroySemaphoreSciSyncPoolNV = nullptr; #else - void * fp_vkDestroySemaphoreSciSyncPoolNV{}; + void* fp_vkDestroySemaphoreSciSyncPoolNV{}; #endif #if (defined(VK_EXT_display_control)) PFN_vkDisplayPowerControlEXT fp_vkDisplayPowerControlEXT = nullptr; #else - void * fp_vkDisplayPowerControlEXT{}; + void* fp_vkDisplayPowerControlEXT{}; #endif #if (defined(VK_EXT_display_control)) PFN_vkRegisterDeviceEventEXT fp_vkRegisterDeviceEventEXT = nullptr; #else - void * fp_vkRegisterDeviceEventEXT{}; + void* fp_vkRegisterDeviceEventEXT{}; #endif #if (defined(VK_EXT_display_control)) PFN_vkRegisterDisplayEventEXT 
fp_vkRegisterDisplayEventEXT = nullptr; #else - void * fp_vkRegisterDisplayEventEXT{}; + void* fp_vkRegisterDisplayEventEXT{}; #endif #if (defined(VK_EXT_display_control)) PFN_vkGetSwapchainCounterEXT fp_vkGetSwapchainCounterEXT = nullptr; #else - void * fp_vkGetSwapchainCounterEXT{}; + void* fp_vkGetSwapchainCounterEXT{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkGetDeviceGroupPeerMemoryFeatures fp_vkGetDeviceGroupPeerMemoryFeatures = nullptr; #else - void * fp_vkGetDeviceGroupPeerMemoryFeatures{}; + void* fp_vkGetDeviceGroupPeerMemoryFeatures{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkBindBufferMemory2 fp_vkBindBufferMemory2 = nullptr; #else - void * fp_vkBindBufferMemory2{}; + void* fp_vkBindBufferMemory2{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkBindImageMemory2 fp_vkBindImageMemory2 = nullptr; #else - void * fp_vkBindImageMemory2{}; + void* fp_vkBindImageMemory2{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkCmdSetDeviceMask fp_vkCmdSetDeviceMask = nullptr; #else - void * fp_vkCmdSetDeviceMask{}; + void* fp_vkCmdSetDeviceMask{}; #endif #if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group)) PFN_vkGetDeviceGroupPresentCapabilitiesKHR fp_vkGetDeviceGroupPresentCapabilitiesKHR = nullptr; #else - void * fp_vkGetDeviceGroupPresentCapabilitiesKHR{}; + void* fp_vkGetDeviceGroupPresentCapabilitiesKHR{}; #endif #if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group)) PFN_vkGetDeviceGroupSurfacePresentModesKHR fp_vkGetDeviceGroupSurfacePresentModesKHR = nullptr; #else - void * fp_vkGetDeviceGroupSurfacePresentModesKHR{}; + void* fp_vkGetDeviceGroupSurfacePresentModesKHR{}; #endif #if (defined(VK_KHR_swapchain)) || (defined(VK_KHR_device_group)) PFN_vkAcquireNextImage2KHR fp_vkAcquireNextImage2KHR = nullptr; #else - void * fp_vkAcquireNextImage2KHR{}; + void* fp_vkAcquireNextImage2KHR{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkCmdDispatchBase fp_vkCmdDispatchBase = nullptr; #else - void * fp_vkCmdDispatchBase{}; + void* fp_vkCmdDispatchBase{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkCreateDescriptorUpdateTemplate fp_vkCreateDescriptorUpdateTemplate = nullptr; #else - void * fp_vkCreateDescriptorUpdateTemplate{}; + void* fp_vkCreateDescriptorUpdateTemplate{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkDestroyDescriptorUpdateTemplate fp_vkDestroyDescriptorUpdateTemplate = nullptr; #else - void * fp_vkDestroyDescriptorUpdateTemplate{}; + void* fp_vkDestroyDescriptorUpdateTemplate{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkUpdateDescriptorSetWithTemplate fp_vkUpdateDescriptorSetWithTemplate = nullptr; #else - void * fp_vkUpdateDescriptorSetWithTemplate{}; + void* fp_vkUpdateDescriptorSetWithTemplate{}; #endif #if (defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_descriptor_update_template)) PFN_vkCmdPushDescriptorSetWithTemplateKHR fp_vkCmdPushDescriptorSetWithTemplateKHR = nullptr; #else - void * fp_vkCmdPushDescriptorSetWithTemplateKHR{}; + void* fp_vkCmdPushDescriptorSetWithTemplateKHR{}; #endif #if (defined(VK_EXT_hdr_metadata)) PFN_vkSetHdrMetadataEXT fp_vkSetHdrMetadataEXT = nullptr; #else - void * fp_vkSetHdrMetadataEXT{}; + void* fp_vkSetHdrMetadataEXT{}; #endif #if (defined(VK_KHR_shared_presentable_image)) PFN_vkGetSwapchainStatusKHR fp_vkGetSwapchainStatusKHR = nullptr; #else - void * fp_vkGetSwapchainStatusKHR{}; + void* fp_vkGetSwapchainStatusKHR{}; #endif #if (defined(VK_GOOGLE_display_timing)) PFN_vkGetRefreshCycleDurationGOOGLE fp_vkGetRefreshCycleDurationGOOGLE = nullptr; #else - void * fp_vkGetRefreshCycleDurationGOOGLE{}; + void* 
fp_vkGetRefreshCycleDurationGOOGLE{}; #endif #if (defined(VK_GOOGLE_display_timing)) PFN_vkGetPastPresentationTimingGOOGLE fp_vkGetPastPresentationTimingGOOGLE = nullptr; #else - void * fp_vkGetPastPresentationTimingGOOGLE{}; + void* fp_vkGetPastPresentationTimingGOOGLE{}; #endif #if (defined(VK_NV_clip_space_w_scaling)) PFN_vkCmdSetViewportWScalingNV fp_vkCmdSetViewportWScalingNV = nullptr; #else - void * fp_vkCmdSetViewportWScalingNV{}; + void* fp_vkCmdSetViewportWScalingNV{}; #endif #if (defined(VK_EXT_discard_rectangles)) PFN_vkCmdSetDiscardRectangleEXT fp_vkCmdSetDiscardRectangleEXT = nullptr; #else - void * fp_vkCmdSetDiscardRectangleEXT{}; + void* fp_vkCmdSetDiscardRectangleEXT{}; #endif #if ((defined(VK_EXT_discard_rectangles))) && VK_HEADER_VERSION >= 241 PFN_vkCmdSetDiscardRectangleEnableEXT fp_vkCmdSetDiscardRectangleEnableEXT = nullptr; #else - void * fp_vkCmdSetDiscardRectangleEnableEXT{}; + void* fp_vkCmdSetDiscardRectangleEnableEXT{}; #endif #if ((defined(VK_EXT_discard_rectangles))) && VK_HEADER_VERSION >= 241 PFN_vkCmdSetDiscardRectangleModeEXT fp_vkCmdSetDiscardRectangleModeEXT = nullptr; #else - void * fp_vkCmdSetDiscardRectangleModeEXT{}; + void* fp_vkCmdSetDiscardRectangleModeEXT{}; #endif #if (defined(VK_EXT_sample_locations)) PFN_vkCmdSetSampleLocationsEXT fp_vkCmdSetSampleLocationsEXT = nullptr; #else - void * fp_vkCmdSetSampleLocationsEXT{}; + void* fp_vkCmdSetSampleLocationsEXT{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkGetBufferMemoryRequirements2 fp_vkGetBufferMemoryRequirements2 = nullptr; #else - void * fp_vkGetBufferMemoryRequirements2{}; + void* fp_vkGetBufferMemoryRequirements2{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkGetImageMemoryRequirements2 fp_vkGetImageMemoryRequirements2 = nullptr; #else - void * fp_vkGetImageMemoryRequirements2{}; + void* fp_vkGetImageMemoryRequirements2{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkGetImageSparseMemoryRequirements2 fp_vkGetImageSparseMemoryRequirements2 = nullptr; #else - void * fp_vkGetImageSparseMemoryRequirements2{}; + void* fp_vkGetImageSparseMemoryRequirements2{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkGetDeviceBufferMemoryRequirements fp_vkGetDeviceBufferMemoryRequirements = nullptr; #else - void * fp_vkGetDeviceBufferMemoryRequirements{}; + void* fp_vkGetDeviceBufferMemoryRequirements{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkGetDeviceImageMemoryRequirements fp_vkGetDeviceImageMemoryRequirements = nullptr; #else - void * fp_vkGetDeviceImageMemoryRequirements{}; + void* fp_vkGetDeviceImageMemoryRequirements{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkGetDeviceImageSparseMemoryRequirements fp_vkGetDeviceImageSparseMemoryRequirements = nullptr; #else - void * fp_vkGetDeviceImageSparseMemoryRequirements{}; + void* fp_vkGetDeviceImageSparseMemoryRequirements{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkCreateSamplerYcbcrConversion fp_vkCreateSamplerYcbcrConversion = nullptr; #else - void * fp_vkCreateSamplerYcbcrConversion{}; + void* fp_vkCreateSamplerYcbcrConversion{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkDestroySamplerYcbcrConversion fp_vkDestroySamplerYcbcrConversion = nullptr; #else - void * fp_vkDestroySamplerYcbcrConversion{}; + void* fp_vkDestroySamplerYcbcrConversion{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkGetDeviceQueue2 fp_vkGetDeviceQueue2 = nullptr; #else - void * fp_vkGetDeviceQueue2{}; + void* fp_vkGetDeviceQueue2{}; #endif #if (defined(VK_EXT_validation_cache)) PFN_vkCreateValidationCacheEXT fp_vkCreateValidationCacheEXT = nullptr; #else - void * 
fp_vkCreateValidationCacheEXT{}; + void* fp_vkCreateValidationCacheEXT{}; #endif #if (defined(VK_EXT_validation_cache)) PFN_vkDestroyValidationCacheEXT fp_vkDestroyValidationCacheEXT = nullptr; #else - void * fp_vkDestroyValidationCacheEXT{}; + void* fp_vkDestroyValidationCacheEXT{}; #endif #if (defined(VK_EXT_validation_cache)) PFN_vkGetValidationCacheDataEXT fp_vkGetValidationCacheDataEXT = nullptr; #else - void * fp_vkGetValidationCacheDataEXT{}; + void* fp_vkGetValidationCacheDataEXT{}; #endif #if (defined(VK_EXT_validation_cache)) PFN_vkMergeValidationCachesEXT fp_vkMergeValidationCachesEXT = nullptr; #else - void * fp_vkMergeValidationCachesEXT{}; + void* fp_vkMergeValidationCachesEXT{}; #endif #if (defined(VK_VERSION_1_1)) PFN_vkGetDescriptorSetLayoutSupport fp_vkGetDescriptorSetLayoutSupport = nullptr; #else - void * fp_vkGetDescriptorSetLayoutSupport{}; + void* fp_vkGetDescriptorSetLayoutSupport{}; #endif #if (defined(VK_ANDROID_native_buffer)) PFN_vkGetSwapchainGrallocUsageANDROID fp_vkGetSwapchainGrallocUsageANDROID = nullptr; #else - void * fp_vkGetSwapchainGrallocUsageANDROID{}; + void* fp_vkGetSwapchainGrallocUsageANDROID{}; #endif #if (defined(VK_ANDROID_native_buffer)) PFN_vkGetSwapchainGrallocUsage2ANDROID fp_vkGetSwapchainGrallocUsage2ANDROID = nullptr; #else - void * fp_vkGetSwapchainGrallocUsage2ANDROID{}; + void* fp_vkGetSwapchainGrallocUsage2ANDROID{}; #endif #if (defined(VK_ANDROID_native_buffer)) PFN_vkAcquireImageANDROID fp_vkAcquireImageANDROID = nullptr; #else - void * fp_vkAcquireImageANDROID{}; + void* fp_vkAcquireImageANDROID{}; #endif #if (defined(VK_ANDROID_native_buffer)) PFN_vkQueueSignalReleaseImageANDROID fp_vkQueueSignalReleaseImageANDROID = nullptr; #else - void * fp_vkQueueSignalReleaseImageANDROID{}; + void* fp_vkQueueSignalReleaseImageANDROID{}; #endif #if (defined(VK_AMD_shader_info)) PFN_vkGetShaderInfoAMD fp_vkGetShaderInfoAMD = nullptr; #else - void * fp_vkGetShaderInfoAMD{}; + void* fp_vkGetShaderInfoAMD{}; #endif #if (defined(VK_AMD_display_native_hdr)) PFN_vkSetLocalDimmingAMD fp_vkSetLocalDimmingAMD = nullptr; #else - void * fp_vkSetLocalDimmingAMD{}; + void* fp_vkSetLocalDimmingAMD{}; #endif #if (defined(VK_KHR_calibrated_timestamps)) PFN_vkGetCalibratedTimestampsKHR fp_vkGetCalibratedTimestampsKHR = nullptr; #else - void * fp_vkGetCalibratedTimestampsKHR{}; + void* fp_vkGetCalibratedTimestampsKHR{}; #endif #if (defined(VK_EXT_debug_utils)) PFN_vkSetDebugUtilsObjectNameEXT fp_vkSetDebugUtilsObjectNameEXT = nullptr; #else - void * fp_vkSetDebugUtilsObjectNameEXT{}; + void* fp_vkSetDebugUtilsObjectNameEXT{}; #endif #if (defined(VK_EXT_debug_utils)) PFN_vkSetDebugUtilsObjectTagEXT fp_vkSetDebugUtilsObjectTagEXT = nullptr; #else - void * fp_vkSetDebugUtilsObjectTagEXT{}; + void* fp_vkSetDebugUtilsObjectTagEXT{}; #endif #if (defined(VK_EXT_debug_utils)) PFN_vkQueueBeginDebugUtilsLabelEXT fp_vkQueueBeginDebugUtilsLabelEXT = nullptr; #else - void * fp_vkQueueBeginDebugUtilsLabelEXT{}; + void* fp_vkQueueBeginDebugUtilsLabelEXT{}; #endif #if (defined(VK_EXT_debug_utils)) PFN_vkQueueEndDebugUtilsLabelEXT fp_vkQueueEndDebugUtilsLabelEXT = nullptr; #else - void * fp_vkQueueEndDebugUtilsLabelEXT{}; + void* fp_vkQueueEndDebugUtilsLabelEXT{}; #endif #if (defined(VK_EXT_debug_utils)) PFN_vkQueueInsertDebugUtilsLabelEXT fp_vkQueueInsertDebugUtilsLabelEXT = nullptr; #else - void * fp_vkQueueInsertDebugUtilsLabelEXT{}; + void* fp_vkQueueInsertDebugUtilsLabelEXT{}; #endif #if (defined(VK_EXT_debug_utils)) PFN_vkCmdBeginDebugUtilsLabelEXT 
fp_vkCmdBeginDebugUtilsLabelEXT = nullptr; #else - void * fp_vkCmdBeginDebugUtilsLabelEXT{}; + void* fp_vkCmdBeginDebugUtilsLabelEXT{}; #endif #if (defined(VK_EXT_debug_utils)) PFN_vkCmdEndDebugUtilsLabelEXT fp_vkCmdEndDebugUtilsLabelEXT = nullptr; #else - void * fp_vkCmdEndDebugUtilsLabelEXT{}; + void* fp_vkCmdEndDebugUtilsLabelEXT{}; #endif #if (defined(VK_EXT_debug_utils)) PFN_vkCmdInsertDebugUtilsLabelEXT fp_vkCmdInsertDebugUtilsLabelEXT = nullptr; #else - void * fp_vkCmdInsertDebugUtilsLabelEXT{}; + void* fp_vkCmdInsertDebugUtilsLabelEXT{}; #endif #if (defined(VK_EXT_external_memory_host)) PFN_vkGetMemoryHostPointerPropertiesEXT fp_vkGetMemoryHostPointerPropertiesEXT = nullptr; #else - void * fp_vkGetMemoryHostPointerPropertiesEXT{}; + void* fp_vkGetMemoryHostPointerPropertiesEXT{}; #endif #if (defined(VK_AMD_buffer_marker)) PFN_vkCmdWriteBufferMarkerAMD fp_vkCmdWriteBufferMarkerAMD = nullptr; #else - void * fp_vkCmdWriteBufferMarkerAMD{}; + void* fp_vkCmdWriteBufferMarkerAMD{}; #endif #if (defined(VK_VERSION_1_2)) PFN_vkCreateRenderPass2 fp_vkCreateRenderPass2 = nullptr; #else - void * fp_vkCreateRenderPass2{}; + void* fp_vkCreateRenderPass2{}; #endif #if (defined(VK_VERSION_1_2)) PFN_vkCmdBeginRenderPass2 fp_vkCmdBeginRenderPass2 = nullptr; #else - void * fp_vkCmdBeginRenderPass2{}; + void* fp_vkCmdBeginRenderPass2{}; #endif #if (defined(VK_VERSION_1_2)) PFN_vkCmdNextSubpass2 fp_vkCmdNextSubpass2 = nullptr; #else - void * fp_vkCmdNextSubpass2{}; + void* fp_vkCmdNextSubpass2{}; #endif #if (defined(VK_VERSION_1_2)) PFN_vkCmdEndRenderPass2 fp_vkCmdEndRenderPass2 = nullptr; #else - void * fp_vkCmdEndRenderPass2{}; + void* fp_vkCmdEndRenderPass2{}; #endif #if (defined(VK_VERSION_1_2)) PFN_vkGetSemaphoreCounterValue fp_vkGetSemaphoreCounterValue = nullptr; #else - void * fp_vkGetSemaphoreCounterValue{}; + void* fp_vkGetSemaphoreCounterValue{}; #endif #if (defined(VK_VERSION_1_2)) PFN_vkWaitSemaphores fp_vkWaitSemaphores = nullptr; #else - void * fp_vkWaitSemaphores{}; + void* fp_vkWaitSemaphores{}; #endif #if (defined(VK_VERSION_1_2)) PFN_vkSignalSemaphore fp_vkSignalSemaphore = nullptr; #else - void * fp_vkSignalSemaphore{}; + void* fp_vkSignalSemaphore{}; #endif #if (defined(VK_ANDROID_external_memory_android_hardware_buffer)) PFN_vkGetAndroidHardwareBufferPropertiesANDROID fp_vkGetAndroidHardwareBufferPropertiesANDROID = nullptr; #else - void * fp_vkGetAndroidHardwareBufferPropertiesANDROID{}; + void* fp_vkGetAndroidHardwareBufferPropertiesANDROID{}; #endif #if (defined(VK_ANDROID_external_memory_android_hardware_buffer)) PFN_vkGetMemoryAndroidHardwareBufferANDROID fp_vkGetMemoryAndroidHardwareBufferANDROID = nullptr; #else - void * fp_vkGetMemoryAndroidHardwareBufferANDROID{}; + void* fp_vkGetMemoryAndroidHardwareBufferANDROID{}; #endif #if (defined(VK_VERSION_1_2)) PFN_vkCmdDrawIndirectCount fp_vkCmdDrawIndirectCount = nullptr; #else - void * fp_vkCmdDrawIndirectCount{}; + void* fp_vkCmdDrawIndirectCount{}; #endif #if (defined(VK_VERSION_1_2)) PFN_vkCmdDrawIndexedIndirectCount fp_vkCmdDrawIndexedIndirectCount = nullptr; #else - void * fp_vkCmdDrawIndexedIndirectCount{}; + void* fp_vkCmdDrawIndexedIndirectCount{}; #endif #if (defined(VK_NV_device_diagnostic_checkpoints)) PFN_vkCmdSetCheckpointNV fp_vkCmdSetCheckpointNV = nullptr; #else - void * fp_vkCmdSetCheckpointNV{}; + void* fp_vkCmdSetCheckpointNV{}; #endif #if (defined(VK_NV_device_diagnostic_checkpoints)) PFN_vkGetQueueCheckpointDataNV fp_vkGetQueueCheckpointDataNV = nullptr; #else - void * fp_vkGetQueueCheckpointDataNV{}; + 
void* fp_vkGetQueueCheckpointDataNV{}; #endif #if (defined(VK_EXT_transform_feedback)) PFN_vkCmdBindTransformFeedbackBuffersEXT fp_vkCmdBindTransformFeedbackBuffersEXT = nullptr; #else - void * fp_vkCmdBindTransformFeedbackBuffersEXT{}; + void* fp_vkCmdBindTransformFeedbackBuffersEXT{}; #endif #if (defined(VK_EXT_transform_feedback)) PFN_vkCmdBeginTransformFeedbackEXT fp_vkCmdBeginTransformFeedbackEXT = nullptr; #else - void * fp_vkCmdBeginTransformFeedbackEXT{}; + void* fp_vkCmdBeginTransformFeedbackEXT{}; #endif #if (defined(VK_EXT_transform_feedback)) PFN_vkCmdEndTransformFeedbackEXT fp_vkCmdEndTransformFeedbackEXT = nullptr; #else - void * fp_vkCmdEndTransformFeedbackEXT{}; + void* fp_vkCmdEndTransformFeedbackEXT{}; #endif #if (defined(VK_EXT_transform_feedback)) PFN_vkCmdBeginQueryIndexedEXT fp_vkCmdBeginQueryIndexedEXT = nullptr; #else - void * fp_vkCmdBeginQueryIndexedEXT{}; + void* fp_vkCmdBeginQueryIndexedEXT{}; #endif #if (defined(VK_EXT_transform_feedback)) PFN_vkCmdEndQueryIndexedEXT fp_vkCmdEndQueryIndexedEXT = nullptr; #else - void * fp_vkCmdEndQueryIndexedEXT{}; + void* fp_vkCmdEndQueryIndexedEXT{}; #endif #if (defined(VK_EXT_transform_feedback)) PFN_vkCmdDrawIndirectByteCountEXT fp_vkCmdDrawIndirectByteCountEXT = nullptr; #else - void * fp_vkCmdDrawIndirectByteCountEXT{}; + void* fp_vkCmdDrawIndirectByteCountEXT{}; #endif #if (defined(VK_NV_scissor_exclusive)) PFN_vkCmdSetExclusiveScissorNV fp_vkCmdSetExclusiveScissorNV = nullptr; #else - void * fp_vkCmdSetExclusiveScissorNV{}; + void* fp_vkCmdSetExclusiveScissorNV{}; #endif #if ((defined(VK_NV_scissor_exclusive))) && VK_HEADER_VERSION >= 241 PFN_vkCmdSetExclusiveScissorEnableNV fp_vkCmdSetExclusiveScissorEnableNV = nullptr; #else - void * fp_vkCmdSetExclusiveScissorEnableNV{}; + void* fp_vkCmdSetExclusiveScissorEnableNV{}; #endif #if (defined(VK_NV_shading_rate_image)) PFN_vkCmdBindShadingRateImageNV fp_vkCmdBindShadingRateImageNV = nullptr; #else - void * fp_vkCmdBindShadingRateImageNV{}; + void* fp_vkCmdBindShadingRateImageNV{}; #endif #if (defined(VK_NV_shading_rate_image)) PFN_vkCmdSetViewportShadingRatePaletteNV fp_vkCmdSetViewportShadingRatePaletteNV = nullptr; #else - void * fp_vkCmdSetViewportShadingRatePaletteNV{}; + void* fp_vkCmdSetViewportShadingRatePaletteNV{}; #endif #if (defined(VK_NV_shading_rate_image)) PFN_vkCmdSetCoarseSampleOrderNV fp_vkCmdSetCoarseSampleOrderNV = nullptr; #else - void * fp_vkCmdSetCoarseSampleOrderNV{}; + void* fp_vkCmdSetCoarseSampleOrderNV{}; #endif #if (defined(VK_NV_mesh_shader)) PFN_vkCmdDrawMeshTasksNV fp_vkCmdDrawMeshTasksNV = nullptr; #else - void * fp_vkCmdDrawMeshTasksNV{}; + void* fp_vkCmdDrawMeshTasksNV{}; #endif #if (defined(VK_NV_mesh_shader)) PFN_vkCmdDrawMeshTasksIndirectNV fp_vkCmdDrawMeshTasksIndirectNV = nullptr; #else - void * fp_vkCmdDrawMeshTasksIndirectNV{}; + void* fp_vkCmdDrawMeshTasksIndirectNV{}; #endif #if (defined(VK_NV_mesh_shader)) PFN_vkCmdDrawMeshTasksIndirectCountNV fp_vkCmdDrawMeshTasksIndirectCountNV = nullptr; #else - void * fp_vkCmdDrawMeshTasksIndirectCountNV{}; + void* fp_vkCmdDrawMeshTasksIndirectCountNV{}; #endif #if (defined(VK_EXT_mesh_shader)) PFN_vkCmdDrawMeshTasksEXT fp_vkCmdDrawMeshTasksEXT = nullptr; #else - void * fp_vkCmdDrawMeshTasksEXT{}; + void* fp_vkCmdDrawMeshTasksEXT{}; #endif #if (defined(VK_EXT_mesh_shader)) PFN_vkCmdDrawMeshTasksIndirectEXT fp_vkCmdDrawMeshTasksIndirectEXT = nullptr; #else - void * fp_vkCmdDrawMeshTasksIndirectEXT{}; + void* fp_vkCmdDrawMeshTasksIndirectEXT{}; #endif #if (defined(VK_EXT_mesh_shader)) 
PFN_vkCmdDrawMeshTasksIndirectCountEXT fp_vkCmdDrawMeshTasksIndirectCountEXT = nullptr; #else - void * fp_vkCmdDrawMeshTasksIndirectCountEXT{}; + void* fp_vkCmdDrawMeshTasksIndirectCountEXT{}; #endif #if (defined(VK_NV_ray_tracing)) PFN_vkCompileDeferredNV fp_vkCompileDeferredNV = nullptr; #else - void * fp_vkCompileDeferredNV{}; + void* fp_vkCompileDeferredNV{}; #endif #if (defined(VK_NV_ray_tracing)) PFN_vkCreateAccelerationStructureNV fp_vkCreateAccelerationStructureNV = nullptr; #else - void * fp_vkCreateAccelerationStructureNV{}; + void* fp_vkCreateAccelerationStructureNV{}; #endif #if (defined(VK_HUAWEI_invocation_mask)) PFN_vkCmdBindInvocationMaskHUAWEI fp_vkCmdBindInvocationMaskHUAWEI = nullptr; #else - void * fp_vkCmdBindInvocationMaskHUAWEI{}; + void* fp_vkCmdBindInvocationMaskHUAWEI{}; #endif #if (defined(VK_KHR_acceleration_structure)) PFN_vkDestroyAccelerationStructureKHR fp_vkDestroyAccelerationStructureKHR = nullptr; #else - void * fp_vkDestroyAccelerationStructureKHR{}; + void* fp_vkDestroyAccelerationStructureKHR{}; #endif #if (defined(VK_NV_ray_tracing)) PFN_vkDestroyAccelerationStructureNV fp_vkDestroyAccelerationStructureNV = nullptr; #else - void * fp_vkDestroyAccelerationStructureNV{}; + void* fp_vkDestroyAccelerationStructureNV{}; #endif #if (defined(VK_NV_ray_tracing)) PFN_vkGetAccelerationStructureMemoryRequirementsNV fp_vkGetAccelerationStructureMemoryRequirementsNV = nullptr; #else - void * fp_vkGetAccelerationStructureMemoryRequirementsNV{}; + void* fp_vkGetAccelerationStructureMemoryRequirementsNV{}; #endif #if (defined(VK_NV_ray_tracing)) PFN_vkBindAccelerationStructureMemoryNV fp_vkBindAccelerationStructureMemoryNV = nullptr; #else - void * fp_vkBindAccelerationStructureMemoryNV{}; + void* fp_vkBindAccelerationStructureMemoryNV{}; #endif #if (defined(VK_NV_ray_tracing)) PFN_vkCmdCopyAccelerationStructureNV fp_vkCmdCopyAccelerationStructureNV = nullptr; #else - void * fp_vkCmdCopyAccelerationStructureNV{}; + void* fp_vkCmdCopyAccelerationStructureNV{}; #endif #if (defined(VK_KHR_acceleration_structure)) PFN_vkCmdCopyAccelerationStructureKHR fp_vkCmdCopyAccelerationStructureKHR = nullptr; #else - void * fp_vkCmdCopyAccelerationStructureKHR{}; + void* fp_vkCmdCopyAccelerationStructureKHR{}; #endif #if (defined(VK_KHR_acceleration_structure)) PFN_vkCopyAccelerationStructureKHR fp_vkCopyAccelerationStructureKHR = nullptr; #else - void * fp_vkCopyAccelerationStructureKHR{}; + void* fp_vkCopyAccelerationStructureKHR{}; #endif #if (defined(VK_KHR_acceleration_structure)) PFN_vkCmdCopyAccelerationStructureToMemoryKHR fp_vkCmdCopyAccelerationStructureToMemoryKHR = nullptr; #else - void * fp_vkCmdCopyAccelerationStructureToMemoryKHR{}; + void* fp_vkCmdCopyAccelerationStructureToMemoryKHR{}; #endif #if (defined(VK_KHR_acceleration_structure)) PFN_vkCopyAccelerationStructureToMemoryKHR fp_vkCopyAccelerationStructureToMemoryKHR = nullptr; #else - void * fp_vkCopyAccelerationStructureToMemoryKHR{}; + void* fp_vkCopyAccelerationStructureToMemoryKHR{}; #endif #if (defined(VK_KHR_acceleration_structure)) PFN_vkCmdCopyMemoryToAccelerationStructureKHR fp_vkCmdCopyMemoryToAccelerationStructureKHR = nullptr; #else - void * fp_vkCmdCopyMemoryToAccelerationStructureKHR{}; + void* fp_vkCmdCopyMemoryToAccelerationStructureKHR{}; #endif #if (defined(VK_KHR_acceleration_structure)) PFN_vkCopyMemoryToAccelerationStructureKHR fp_vkCopyMemoryToAccelerationStructureKHR = nullptr; #else - void * fp_vkCopyMemoryToAccelerationStructureKHR{}; + void* fp_vkCopyMemoryToAccelerationStructureKHR{}; 
#endif #if (defined(VK_KHR_acceleration_structure)) PFN_vkCmdWriteAccelerationStructuresPropertiesKHR fp_vkCmdWriteAccelerationStructuresPropertiesKHR = nullptr; #else - void * fp_vkCmdWriteAccelerationStructuresPropertiesKHR{}; + void* fp_vkCmdWriteAccelerationStructuresPropertiesKHR{}; #endif #if (defined(VK_NV_ray_tracing)) PFN_vkCmdWriteAccelerationStructuresPropertiesNV fp_vkCmdWriteAccelerationStructuresPropertiesNV = nullptr; #else - void * fp_vkCmdWriteAccelerationStructuresPropertiesNV{}; + void* fp_vkCmdWriteAccelerationStructuresPropertiesNV{}; #endif #if (defined(VK_NV_ray_tracing)) PFN_vkCmdBuildAccelerationStructureNV fp_vkCmdBuildAccelerationStructureNV = nullptr; #else - void * fp_vkCmdBuildAccelerationStructureNV{}; + void* fp_vkCmdBuildAccelerationStructureNV{}; #endif #if (defined(VK_KHR_acceleration_structure)) PFN_vkWriteAccelerationStructuresPropertiesKHR fp_vkWriteAccelerationStructuresPropertiesKHR = nullptr; #else - void * fp_vkWriteAccelerationStructuresPropertiesKHR{}; + void* fp_vkWriteAccelerationStructuresPropertiesKHR{}; #endif #if (defined(VK_KHR_ray_tracing_pipeline)) PFN_vkCmdTraceRaysKHR fp_vkCmdTraceRaysKHR = nullptr; #else - void * fp_vkCmdTraceRaysKHR{}; + void* fp_vkCmdTraceRaysKHR{}; #endif #if (defined(VK_NV_ray_tracing)) PFN_vkCmdTraceRaysNV fp_vkCmdTraceRaysNV = nullptr; #else - void * fp_vkCmdTraceRaysNV{}; + void* fp_vkCmdTraceRaysNV{}; #endif #if (defined(VK_KHR_ray_tracing_pipeline)) PFN_vkGetRayTracingShaderGroupHandlesKHR fp_vkGetRayTracingShaderGroupHandlesKHR = nullptr; #else - void * fp_vkGetRayTracingShaderGroupHandlesKHR{}; + void* fp_vkGetRayTracingShaderGroupHandlesKHR{}; #endif #if (defined(VK_KHR_ray_tracing_pipeline)) - PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR fp_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = nullptr; + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR fp_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = + nullptr; #else - void * fp_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR{}; + void* fp_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR{}; #endif #if (defined(VK_NV_ray_tracing)) PFN_vkGetAccelerationStructureHandleNV fp_vkGetAccelerationStructureHandleNV = nullptr; #else - void * fp_vkGetAccelerationStructureHandleNV{}; + void* fp_vkGetAccelerationStructureHandleNV{}; #endif #if (defined(VK_NV_ray_tracing)) PFN_vkCreateRayTracingPipelinesNV fp_vkCreateRayTracingPipelinesNV = nullptr; #else - void * fp_vkCreateRayTracingPipelinesNV{}; + void* fp_vkCreateRayTracingPipelinesNV{}; #endif #if (defined(VK_KHR_ray_tracing_pipeline)) PFN_vkCreateRayTracingPipelinesKHR fp_vkCreateRayTracingPipelinesKHR = nullptr; #else - void * fp_vkCreateRayTracingPipelinesKHR{}; + void* fp_vkCreateRayTracingPipelinesKHR{}; #endif #if (defined(VK_KHR_ray_tracing_pipeline)) PFN_vkCmdTraceRaysIndirectKHR fp_vkCmdTraceRaysIndirectKHR = nullptr; #else - void * fp_vkCmdTraceRaysIndirectKHR{}; + void* fp_vkCmdTraceRaysIndirectKHR{}; #endif #if (defined(VK_KHR_ray_tracing_maintenance1)) PFN_vkCmdTraceRaysIndirect2KHR fp_vkCmdTraceRaysIndirect2KHR = nullptr; #else - void * fp_vkCmdTraceRaysIndirect2KHR{}; + void* fp_vkCmdTraceRaysIndirect2KHR{}; #endif #if (defined(VK_KHR_acceleration_structure)) PFN_vkGetDeviceAccelerationStructureCompatibilityKHR fp_vkGetDeviceAccelerationStructureCompatibilityKHR = nullptr; #else - void * fp_vkGetDeviceAccelerationStructureCompatibilityKHR{}; + void* fp_vkGetDeviceAccelerationStructureCompatibilityKHR{}; #endif #if (defined(VK_KHR_ray_tracing_pipeline)) 
PFN_vkGetRayTracingShaderGroupStackSizeKHR fp_vkGetRayTracingShaderGroupStackSizeKHR = nullptr; #else - void * fp_vkGetRayTracingShaderGroupStackSizeKHR{}; + void* fp_vkGetRayTracingShaderGroupStackSizeKHR{}; #endif #if (defined(VK_KHR_ray_tracing_pipeline)) PFN_vkCmdSetRayTracingPipelineStackSizeKHR fp_vkCmdSetRayTracingPipelineStackSizeKHR = nullptr; #else - void * fp_vkCmdSetRayTracingPipelineStackSizeKHR{}; + void* fp_vkCmdSetRayTracingPipelineStackSizeKHR{}; #endif #if (defined(VK_EXT_full_screen_exclusive)) PFN_vkGetDeviceGroupSurfacePresentModes2EXT fp_vkGetDeviceGroupSurfacePresentModes2EXT = nullptr; #else - void * fp_vkGetDeviceGroupSurfacePresentModes2EXT{}; + void* fp_vkGetDeviceGroupSurfacePresentModes2EXT{}; #endif #if (defined(VK_EXT_full_screen_exclusive)) PFN_vkAcquireFullScreenExclusiveModeEXT fp_vkAcquireFullScreenExclusiveModeEXT = nullptr; #else - void * fp_vkAcquireFullScreenExclusiveModeEXT{}; + void* fp_vkAcquireFullScreenExclusiveModeEXT{}; #endif #if (defined(VK_EXT_full_screen_exclusive)) PFN_vkReleaseFullScreenExclusiveModeEXT fp_vkReleaseFullScreenExclusiveModeEXT = nullptr; #else - void * fp_vkReleaseFullScreenExclusiveModeEXT{}; + void* fp_vkReleaseFullScreenExclusiveModeEXT{}; #endif #if (defined(VK_KHR_performance_query)) PFN_vkAcquireProfilingLockKHR fp_vkAcquireProfilingLockKHR = nullptr; #else - void * fp_vkAcquireProfilingLockKHR{}; + void* fp_vkAcquireProfilingLockKHR{}; #endif #if (defined(VK_KHR_performance_query)) PFN_vkReleaseProfilingLockKHR fp_vkReleaseProfilingLockKHR = nullptr; #else - void * fp_vkReleaseProfilingLockKHR{}; + void* fp_vkReleaseProfilingLockKHR{}; #endif #if (defined(VK_EXT_image_drm_format_modifier)) PFN_vkGetImageDrmFormatModifierPropertiesEXT fp_vkGetImageDrmFormatModifierPropertiesEXT = nullptr; #else - void * fp_vkGetImageDrmFormatModifierPropertiesEXT{}; + void* fp_vkGetImageDrmFormatModifierPropertiesEXT{}; #endif #if (defined(VK_VERSION_1_2)) PFN_vkGetBufferOpaqueCaptureAddress fp_vkGetBufferOpaqueCaptureAddress = nullptr; #else - void * fp_vkGetBufferOpaqueCaptureAddress{}; + void* fp_vkGetBufferOpaqueCaptureAddress{}; #endif #if (defined(VK_VERSION_1_2)) PFN_vkGetBufferDeviceAddress fp_vkGetBufferDeviceAddress = nullptr; #else - void * fp_vkGetBufferDeviceAddress{}; + void* fp_vkGetBufferDeviceAddress{}; #endif #if (defined(VK_INTEL_performance_query)) PFN_vkInitializePerformanceApiINTEL fp_vkInitializePerformanceApiINTEL = nullptr; #else - void * fp_vkInitializePerformanceApiINTEL{}; + void* fp_vkInitializePerformanceApiINTEL{}; #endif #if (defined(VK_INTEL_performance_query)) PFN_vkUninitializePerformanceApiINTEL fp_vkUninitializePerformanceApiINTEL = nullptr; #else - void * fp_vkUninitializePerformanceApiINTEL{}; + void* fp_vkUninitializePerformanceApiINTEL{}; #endif #if (defined(VK_INTEL_performance_query)) PFN_vkCmdSetPerformanceMarkerINTEL fp_vkCmdSetPerformanceMarkerINTEL = nullptr; #else - void * fp_vkCmdSetPerformanceMarkerINTEL{}; + void* fp_vkCmdSetPerformanceMarkerINTEL{}; #endif #if (defined(VK_INTEL_performance_query)) PFN_vkCmdSetPerformanceStreamMarkerINTEL fp_vkCmdSetPerformanceStreamMarkerINTEL = nullptr; #else - void * fp_vkCmdSetPerformanceStreamMarkerINTEL{}; + void* fp_vkCmdSetPerformanceStreamMarkerINTEL{}; #endif #if (defined(VK_INTEL_performance_query)) PFN_vkCmdSetPerformanceOverrideINTEL fp_vkCmdSetPerformanceOverrideINTEL = nullptr; #else - void * fp_vkCmdSetPerformanceOverrideINTEL{}; + void* fp_vkCmdSetPerformanceOverrideINTEL{}; #endif #if (defined(VK_INTEL_performance_query)) 
PFN_vkAcquirePerformanceConfigurationINTEL fp_vkAcquirePerformanceConfigurationINTEL = nullptr; #else - void * fp_vkAcquirePerformanceConfigurationINTEL{}; + void* fp_vkAcquirePerformanceConfigurationINTEL{}; #endif #if (defined(VK_INTEL_performance_query)) PFN_vkReleasePerformanceConfigurationINTEL fp_vkReleasePerformanceConfigurationINTEL = nullptr; #else - void * fp_vkReleasePerformanceConfigurationINTEL{}; + void* fp_vkReleasePerformanceConfigurationINTEL{}; #endif #if (defined(VK_INTEL_performance_query)) PFN_vkQueueSetPerformanceConfigurationINTEL fp_vkQueueSetPerformanceConfigurationINTEL = nullptr; #else - void * fp_vkQueueSetPerformanceConfigurationINTEL{}; + void* fp_vkQueueSetPerformanceConfigurationINTEL{}; #endif #if (defined(VK_INTEL_performance_query)) PFN_vkGetPerformanceParameterINTEL fp_vkGetPerformanceParameterINTEL = nullptr; #else - void * fp_vkGetPerformanceParameterINTEL{}; + void* fp_vkGetPerformanceParameterINTEL{}; #endif #if (defined(VK_VERSION_1_2)) PFN_vkGetDeviceMemoryOpaqueCaptureAddress fp_vkGetDeviceMemoryOpaqueCaptureAddress = nullptr; #else - void * fp_vkGetDeviceMemoryOpaqueCaptureAddress{}; + void* fp_vkGetDeviceMemoryOpaqueCaptureAddress{}; #endif #if (defined(VK_KHR_pipeline_executable_properties)) PFN_vkGetPipelineExecutablePropertiesKHR fp_vkGetPipelineExecutablePropertiesKHR = nullptr; #else - void * fp_vkGetPipelineExecutablePropertiesKHR{}; + void* fp_vkGetPipelineExecutablePropertiesKHR{}; #endif #if (defined(VK_KHR_pipeline_executable_properties)) PFN_vkGetPipelineExecutableStatisticsKHR fp_vkGetPipelineExecutableStatisticsKHR = nullptr; #else - void * fp_vkGetPipelineExecutableStatisticsKHR{}; + void* fp_vkGetPipelineExecutableStatisticsKHR{}; #endif #if (defined(VK_KHR_pipeline_executable_properties)) - PFN_vkGetPipelineExecutableInternalRepresentationsKHR fp_vkGetPipelineExecutableInternalRepresentationsKHR = nullptr; + PFN_vkGetPipelineExecutableInternalRepresentationsKHR fp_vkGetPipelineExecutableInternalRepresentationsKHR = + nullptr; #else - void * fp_vkGetPipelineExecutableInternalRepresentationsKHR{}; + void* fp_vkGetPipelineExecutableInternalRepresentationsKHR{}; #endif #if (defined(VK_KHR_line_rasterization)) PFN_vkCmdSetLineStippleKHR fp_vkCmdSetLineStippleKHR = nullptr; #else - void * fp_vkCmdSetLineStippleKHR{}; + void* fp_vkCmdSetLineStippleKHR{}; #endif #if (defined(VKSC_VERSION_1_0)) PFN_vkGetFaultData fp_vkGetFaultData = nullptr; #else - void * fp_vkGetFaultData{}; + void* fp_vkGetFaultData{}; #endif #if (defined(VK_KHR_acceleration_structure)) PFN_vkCreateAccelerationStructureKHR fp_vkCreateAccelerationStructureKHR = nullptr; #else - void * fp_vkCreateAccelerationStructureKHR{}; + void* fp_vkCreateAccelerationStructureKHR{}; #endif #if (defined(VK_KHR_acceleration_structure)) PFN_vkCmdBuildAccelerationStructuresKHR fp_vkCmdBuildAccelerationStructuresKHR = nullptr; #else - void * fp_vkCmdBuildAccelerationStructuresKHR{}; + void* fp_vkCmdBuildAccelerationStructuresKHR{}; #endif #if (defined(VK_KHR_acceleration_structure)) PFN_vkCmdBuildAccelerationStructuresIndirectKHR fp_vkCmdBuildAccelerationStructuresIndirectKHR = nullptr; #else - void * fp_vkCmdBuildAccelerationStructuresIndirectKHR{}; + void* fp_vkCmdBuildAccelerationStructuresIndirectKHR{}; #endif #if (defined(VK_KHR_acceleration_structure)) PFN_vkBuildAccelerationStructuresKHR fp_vkBuildAccelerationStructuresKHR = nullptr; #else - void * fp_vkBuildAccelerationStructuresKHR{}; + void* fp_vkBuildAccelerationStructuresKHR{}; #endif #if 
(defined(VK_KHR_acceleration_structure)) PFN_vkGetAccelerationStructureDeviceAddressKHR fp_vkGetAccelerationStructureDeviceAddressKHR = nullptr; #else - void * fp_vkGetAccelerationStructureDeviceAddressKHR{}; + void* fp_vkGetAccelerationStructureDeviceAddressKHR{}; #endif #if (defined(VK_KHR_deferred_host_operations)) PFN_vkCreateDeferredOperationKHR fp_vkCreateDeferredOperationKHR = nullptr; #else - void * fp_vkCreateDeferredOperationKHR{}; + void* fp_vkCreateDeferredOperationKHR{}; #endif #if (defined(VK_KHR_deferred_host_operations)) PFN_vkDestroyDeferredOperationKHR fp_vkDestroyDeferredOperationKHR = nullptr; #else - void * fp_vkDestroyDeferredOperationKHR{}; + void* fp_vkDestroyDeferredOperationKHR{}; #endif #if (defined(VK_KHR_deferred_host_operations)) PFN_vkGetDeferredOperationMaxConcurrencyKHR fp_vkGetDeferredOperationMaxConcurrencyKHR = nullptr; #else - void * fp_vkGetDeferredOperationMaxConcurrencyKHR{}; + void* fp_vkGetDeferredOperationMaxConcurrencyKHR{}; #endif #if (defined(VK_KHR_deferred_host_operations)) PFN_vkGetDeferredOperationResultKHR fp_vkGetDeferredOperationResultKHR = nullptr; #else - void * fp_vkGetDeferredOperationResultKHR{}; + void* fp_vkGetDeferredOperationResultKHR{}; #endif #if (defined(VK_KHR_deferred_host_operations)) PFN_vkDeferredOperationJoinKHR fp_vkDeferredOperationJoinKHR = nullptr; #else - void * fp_vkDeferredOperationJoinKHR{}; + void* fp_vkDeferredOperationJoinKHR{}; #endif #if (defined(VK_NV_device_generated_commands_compute)) PFN_vkGetPipelineIndirectMemoryRequirementsNV fp_vkGetPipelineIndirectMemoryRequirementsNV = nullptr; #else - void * fp_vkGetPipelineIndirectMemoryRequirementsNV{}; + void* fp_vkGetPipelineIndirectMemoryRequirementsNV{}; #endif #if (defined(VK_NV_device_generated_commands_compute)) PFN_vkGetPipelineIndirectDeviceAddressNV fp_vkGetPipelineIndirectDeviceAddressNV = nullptr; #else - void * fp_vkGetPipelineIndirectDeviceAddressNV{}; + void* fp_vkGetPipelineIndirectDeviceAddressNV{}; #endif #if (defined(VK_AMD_anti_lag)) PFN_vkAntiLagUpdateAMD fp_vkAntiLagUpdateAMD = nullptr; #else - void * fp_vkAntiLagUpdateAMD{}; + void* fp_vkAntiLagUpdateAMD{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdSetCullMode fp_vkCmdSetCullMode = nullptr; #else - void * fp_vkCmdSetCullMode{}; + void* fp_vkCmdSetCullMode{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdSetFrontFace fp_vkCmdSetFrontFace = nullptr; #else - void * fp_vkCmdSetFrontFace{}; + void* fp_vkCmdSetFrontFace{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdSetPrimitiveTopology fp_vkCmdSetPrimitiveTopology = nullptr; #else - void * fp_vkCmdSetPrimitiveTopology{}; + void* fp_vkCmdSetPrimitiveTopology{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdSetViewportWithCount fp_vkCmdSetViewportWithCount = nullptr; #else - void * fp_vkCmdSetViewportWithCount{}; + void* fp_vkCmdSetViewportWithCount{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdSetScissorWithCount fp_vkCmdSetScissorWithCount = nullptr; #else - void * fp_vkCmdSetScissorWithCount{}; + void* fp_vkCmdSetScissorWithCount{}; #endif #if (defined(VK_KHR_maintenance5)) PFN_vkCmdBindIndexBuffer2KHR fp_vkCmdBindIndexBuffer2KHR = nullptr; #else - void * fp_vkCmdBindIndexBuffer2KHR{}; + void* fp_vkCmdBindIndexBuffer2KHR{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdBindVertexBuffers2 fp_vkCmdBindVertexBuffers2 = nullptr; #else - void * fp_vkCmdBindVertexBuffers2{}; + void* fp_vkCmdBindVertexBuffers2{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdSetDepthTestEnable fp_vkCmdSetDepthTestEnable = nullptr; #else - void * 
fp_vkCmdSetDepthTestEnable{}; + void* fp_vkCmdSetDepthTestEnable{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdSetDepthWriteEnable fp_vkCmdSetDepthWriteEnable = nullptr; #else - void * fp_vkCmdSetDepthWriteEnable{}; + void* fp_vkCmdSetDepthWriteEnable{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdSetDepthCompareOp fp_vkCmdSetDepthCompareOp = nullptr; #else - void * fp_vkCmdSetDepthCompareOp{}; + void* fp_vkCmdSetDepthCompareOp{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdSetDepthBoundsTestEnable fp_vkCmdSetDepthBoundsTestEnable = nullptr; #else - void * fp_vkCmdSetDepthBoundsTestEnable{}; + void* fp_vkCmdSetDepthBoundsTestEnable{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdSetStencilTestEnable fp_vkCmdSetStencilTestEnable = nullptr; #else - void * fp_vkCmdSetStencilTestEnable{}; + void* fp_vkCmdSetStencilTestEnable{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdSetStencilOp fp_vkCmdSetStencilOp = nullptr; #else - void * fp_vkCmdSetStencilOp{}; + void* fp_vkCmdSetStencilOp{}; #endif #if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetPatchControlPointsEXT fp_vkCmdSetPatchControlPointsEXT = nullptr; #else - void * fp_vkCmdSetPatchControlPointsEXT{}; + void* fp_vkCmdSetPatchControlPointsEXT{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdSetRasterizerDiscardEnable fp_vkCmdSetRasterizerDiscardEnable = nullptr; #else - void * fp_vkCmdSetRasterizerDiscardEnable{}; + void* fp_vkCmdSetRasterizerDiscardEnable{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdSetDepthBiasEnable fp_vkCmdSetDepthBiasEnable = nullptr; #else - void * fp_vkCmdSetDepthBiasEnable{}; + void* fp_vkCmdSetDepthBiasEnable{}; #endif #if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetLogicOpEXT fp_vkCmdSetLogicOpEXT = nullptr; #else - void * fp_vkCmdSetLogicOpEXT{}; + void* fp_vkCmdSetLogicOpEXT{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdSetPrimitiveRestartEnable fp_vkCmdSetPrimitiveRestartEnable = nullptr; #else - void * fp_vkCmdSetPrimitiveRestartEnable{}; + void* fp_vkCmdSetPrimitiveRestartEnable{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetTessellationDomainOriginEXT fp_vkCmdSetTessellationDomainOriginEXT = nullptr; #else - void * fp_vkCmdSetTessellationDomainOriginEXT{}; + void* fp_vkCmdSetTessellationDomainOriginEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetDepthClampEnableEXT fp_vkCmdSetDepthClampEnableEXT = nullptr; #else - void * fp_vkCmdSetDepthClampEnableEXT{}; + void* fp_vkCmdSetDepthClampEnableEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetPolygonModeEXT fp_vkCmdSetPolygonModeEXT = nullptr; #else - void * fp_vkCmdSetPolygonModeEXT{}; + void* fp_vkCmdSetPolygonModeEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetRasterizationSamplesEXT fp_vkCmdSetRasterizationSamplesEXT = nullptr; #else - void * fp_vkCmdSetRasterizationSamplesEXT{}; + void* fp_vkCmdSetRasterizationSamplesEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetSampleMaskEXT fp_vkCmdSetSampleMaskEXT = nullptr; #else - void * fp_vkCmdSetSampleMaskEXT{}; + void* fp_vkCmdSetSampleMaskEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetAlphaToCoverageEnableEXT fp_vkCmdSetAlphaToCoverageEnableEXT = nullptr; #else - 
void * fp_vkCmdSetAlphaToCoverageEnableEXT{}; + void* fp_vkCmdSetAlphaToCoverageEnableEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetAlphaToOneEnableEXT fp_vkCmdSetAlphaToOneEnableEXT = nullptr; #else - void * fp_vkCmdSetAlphaToOneEnableEXT{}; + void* fp_vkCmdSetAlphaToOneEnableEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetLogicOpEnableEXT fp_vkCmdSetLogicOpEnableEXT = nullptr; #else - void * fp_vkCmdSetLogicOpEnableEXT{}; + void* fp_vkCmdSetLogicOpEnableEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetColorBlendEnableEXT fp_vkCmdSetColorBlendEnableEXT = nullptr; #else - void * fp_vkCmdSetColorBlendEnableEXT{}; + void* fp_vkCmdSetColorBlendEnableEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetColorBlendEquationEXT fp_vkCmdSetColorBlendEquationEXT = nullptr; #else - void * fp_vkCmdSetColorBlendEquationEXT{}; + void* fp_vkCmdSetColorBlendEquationEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetColorWriteMaskEXT fp_vkCmdSetColorWriteMaskEXT = nullptr; #else - void * fp_vkCmdSetColorWriteMaskEXT{}; + void* fp_vkCmdSetColorWriteMaskEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetRasterizationStreamEXT fp_vkCmdSetRasterizationStreamEXT = nullptr; #else - void * fp_vkCmdSetRasterizationStreamEXT{}; + void* fp_vkCmdSetRasterizationStreamEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetConservativeRasterizationModeEXT fp_vkCmdSetConservativeRasterizationModeEXT = nullptr; #else - void * fp_vkCmdSetConservativeRasterizationModeEXT{}; + void* fp_vkCmdSetConservativeRasterizationModeEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT fp_vkCmdSetExtraPrimitiveOverestimationSizeEXT = nullptr; #else - void * fp_vkCmdSetExtraPrimitiveOverestimationSizeEXT{}; + void* fp_vkCmdSetExtraPrimitiveOverestimationSizeEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetDepthClipEnableEXT fp_vkCmdSetDepthClipEnableEXT = nullptr; #else - void * fp_vkCmdSetDepthClipEnableEXT{}; + void* fp_vkCmdSetDepthClipEnableEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetSampleLocationsEnableEXT fp_vkCmdSetSampleLocationsEnableEXT = nullptr; #else - void * fp_vkCmdSetSampleLocationsEnableEXT{}; + void* fp_vkCmdSetSampleLocationsEnableEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetColorBlendAdvancedEXT fp_vkCmdSetColorBlendAdvancedEXT = nullptr; #else - void * fp_vkCmdSetColorBlendAdvancedEXT{}; + void* fp_vkCmdSetColorBlendAdvancedEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetProvokingVertexModeEXT fp_vkCmdSetProvokingVertexModeEXT = nullptr; #else - void * fp_vkCmdSetProvokingVertexModeEXT{}; + void* fp_vkCmdSetProvokingVertexModeEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetLineRasterizationModeEXT fp_vkCmdSetLineRasterizationModeEXT = nullptr; #else - void * fp_vkCmdSetLineRasterizationModeEXT{}; + void* fp_vkCmdSetLineRasterizationModeEXT{}; 
#endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetLineStippleEnableEXT fp_vkCmdSetLineStippleEnableEXT = nullptr; #else - void * fp_vkCmdSetLineStippleEnableEXT{}; + void* fp_vkCmdSetLineStippleEnableEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetDepthClipNegativeOneToOneEXT fp_vkCmdSetDepthClipNegativeOneToOneEXT = nullptr; #else - void * fp_vkCmdSetDepthClipNegativeOneToOneEXT{}; + void* fp_vkCmdSetDepthClipNegativeOneToOneEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetViewportWScalingEnableNV fp_vkCmdSetViewportWScalingEnableNV = nullptr; #else - void * fp_vkCmdSetViewportWScalingEnableNV{}; + void* fp_vkCmdSetViewportWScalingEnableNV{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetViewportSwizzleNV fp_vkCmdSetViewportSwizzleNV = nullptr; #else - void * fp_vkCmdSetViewportSwizzleNV{}; + void* fp_vkCmdSetViewportSwizzleNV{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetCoverageToColorEnableNV fp_vkCmdSetCoverageToColorEnableNV = nullptr; #else - void * fp_vkCmdSetCoverageToColorEnableNV{}; + void* fp_vkCmdSetCoverageToColorEnableNV{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetCoverageToColorLocationNV fp_vkCmdSetCoverageToColorLocationNV = nullptr; #else - void * fp_vkCmdSetCoverageToColorLocationNV{}; + void* fp_vkCmdSetCoverageToColorLocationNV{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetCoverageModulationModeNV fp_vkCmdSetCoverageModulationModeNV = nullptr; #else - void * fp_vkCmdSetCoverageModulationModeNV{}; + void* fp_vkCmdSetCoverageModulationModeNV{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetCoverageModulationTableEnableNV fp_vkCmdSetCoverageModulationTableEnableNV = nullptr; #else - void * fp_vkCmdSetCoverageModulationTableEnableNV{}; + void* fp_vkCmdSetCoverageModulationTableEnableNV{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetCoverageModulationTableNV fp_vkCmdSetCoverageModulationTableNV = nullptr; #else - void * fp_vkCmdSetCoverageModulationTableNV{}; + void* fp_vkCmdSetCoverageModulationTableNV{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetShadingRateImageEnableNV fp_vkCmdSetShadingRateImageEnableNV = nullptr; #else - void * fp_vkCmdSetShadingRateImageEnableNV{}; + void* fp_vkCmdSetShadingRateImageEnableNV{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetCoverageReductionModeNV fp_vkCmdSetCoverageReductionModeNV = nullptr; #else - void * fp_vkCmdSetCoverageReductionModeNV{}; + void* fp_vkCmdSetCoverageReductionModeNV{}; #endif #if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetRepresentativeFragmentTestEnableNV fp_vkCmdSetRepresentativeFragmentTestEnableNV = nullptr; #else - void * fp_vkCmdSetRepresentativeFragmentTestEnableNV{}; + void* fp_vkCmdSetRepresentativeFragmentTestEnableNV{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCreatePrivateDataSlot fp_vkCreatePrivateDataSlot = nullptr; #else - void * fp_vkCreatePrivateDataSlot{}; + void* fp_vkCreatePrivateDataSlot{}; #endif #if (defined(VK_VERSION_1_3)) 
PFN_vkDestroyPrivateDataSlot fp_vkDestroyPrivateDataSlot = nullptr; #else - void * fp_vkDestroyPrivateDataSlot{}; + void* fp_vkDestroyPrivateDataSlot{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkSetPrivateData fp_vkSetPrivateData = nullptr; #else - void * fp_vkSetPrivateData{}; + void* fp_vkSetPrivateData{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkGetPrivateData fp_vkGetPrivateData = nullptr; #else - void * fp_vkGetPrivateData{}; + void* fp_vkGetPrivateData{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdCopyBuffer2 fp_vkCmdCopyBuffer2 = nullptr; #else - void * fp_vkCmdCopyBuffer2{}; + void* fp_vkCmdCopyBuffer2{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdCopyImage2 fp_vkCmdCopyImage2 = nullptr; #else - void * fp_vkCmdCopyImage2{}; + void* fp_vkCmdCopyImage2{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdBlitImage2 fp_vkCmdBlitImage2 = nullptr; #else - void * fp_vkCmdBlitImage2{}; + void* fp_vkCmdBlitImage2{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdCopyBufferToImage2 fp_vkCmdCopyBufferToImage2 = nullptr; #else - void * fp_vkCmdCopyBufferToImage2{}; + void* fp_vkCmdCopyBufferToImage2{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdCopyImageToBuffer2 fp_vkCmdCopyImageToBuffer2 = nullptr; #else - void * fp_vkCmdCopyImageToBuffer2{}; + void* fp_vkCmdCopyImageToBuffer2{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdResolveImage2 fp_vkCmdResolveImage2 = nullptr; #else - void * fp_vkCmdResolveImage2{}; + void* fp_vkCmdResolveImage2{}; #endif #if (defined(VK_KHR_object_refresh)) PFN_vkCmdRefreshObjectsKHR fp_vkCmdRefreshObjectsKHR = nullptr; #else - void * fp_vkCmdRefreshObjectsKHR{}; + void* fp_vkCmdRefreshObjectsKHR{}; #endif #if (defined(VK_KHR_fragment_shading_rate)) PFN_vkCmdSetFragmentShadingRateKHR fp_vkCmdSetFragmentShadingRateKHR = nullptr; #else - void * fp_vkCmdSetFragmentShadingRateKHR{}; + void* fp_vkCmdSetFragmentShadingRateKHR{}; #endif #if (defined(VK_NV_fragment_shading_rate_enums)) PFN_vkCmdSetFragmentShadingRateEnumNV fp_vkCmdSetFragmentShadingRateEnumNV = nullptr; #else - void * fp_vkCmdSetFragmentShadingRateEnumNV{}; + void* fp_vkCmdSetFragmentShadingRateEnumNV{}; #endif #if (defined(VK_KHR_acceleration_structure)) PFN_vkGetAccelerationStructureBuildSizesKHR fp_vkGetAccelerationStructureBuildSizesKHR = nullptr; #else - void * fp_vkGetAccelerationStructureBuildSizesKHR{}; + void* fp_vkGetAccelerationStructureBuildSizesKHR{}; #endif #if (defined(VK_EXT_vertex_input_dynamic_state)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetVertexInputEXT fp_vkCmdSetVertexInputEXT = nullptr; #else - void * fp_vkCmdSetVertexInputEXT{}; + void* fp_vkCmdSetVertexInputEXT{}; #endif #if (defined(VK_EXT_color_write_enable)) PFN_vkCmdSetColorWriteEnableEXT fp_vkCmdSetColorWriteEnableEXT = nullptr; #else - void * fp_vkCmdSetColorWriteEnableEXT{}; + void* fp_vkCmdSetColorWriteEnableEXT{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdSetEvent2 fp_vkCmdSetEvent2 = nullptr; #else - void * fp_vkCmdSetEvent2{}; + void* fp_vkCmdSetEvent2{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdResetEvent2 fp_vkCmdResetEvent2 = nullptr; #else - void * fp_vkCmdResetEvent2{}; + void* fp_vkCmdResetEvent2{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdWaitEvents2 fp_vkCmdWaitEvents2 = nullptr; #else - void * fp_vkCmdWaitEvents2{}; + void* fp_vkCmdWaitEvents2{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdPipelineBarrier2 fp_vkCmdPipelineBarrier2 = nullptr; #else - void * fp_vkCmdPipelineBarrier2{}; + void* fp_vkCmdPipelineBarrier2{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkQueueSubmit2 
fp_vkQueueSubmit2 = nullptr; #else - void * fp_vkQueueSubmit2{}; + void* fp_vkQueueSubmit2{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdWriteTimestamp2 fp_vkCmdWriteTimestamp2 = nullptr; #else - void * fp_vkCmdWriteTimestamp2{}; + void* fp_vkCmdWriteTimestamp2{}; #endif #if (defined(VK_KHR_synchronization2)) PFN_vkCmdWriteBufferMarker2AMD fp_vkCmdWriteBufferMarker2AMD = nullptr; #else - void * fp_vkCmdWriteBufferMarker2AMD{}; + void* fp_vkCmdWriteBufferMarker2AMD{}; #endif #if (defined(VK_KHR_synchronization2)) PFN_vkGetQueueCheckpointData2NV fp_vkGetQueueCheckpointData2NV = nullptr; #else - void * fp_vkGetQueueCheckpointData2NV{}; + void* fp_vkGetQueueCheckpointData2NV{}; #endif #if (defined(VK_EXT_host_image_copy)) PFN_vkCopyMemoryToImageEXT fp_vkCopyMemoryToImageEXT = nullptr; #else - void * fp_vkCopyMemoryToImageEXT{}; + void* fp_vkCopyMemoryToImageEXT{}; #endif #if (defined(VK_EXT_host_image_copy)) PFN_vkCopyImageToMemoryEXT fp_vkCopyImageToMemoryEXT = nullptr; #else - void * fp_vkCopyImageToMemoryEXT{}; + void* fp_vkCopyImageToMemoryEXT{}; #endif #if (defined(VK_EXT_host_image_copy)) PFN_vkCopyImageToImageEXT fp_vkCopyImageToImageEXT = nullptr; #else - void * fp_vkCopyImageToImageEXT{}; + void* fp_vkCopyImageToImageEXT{}; #endif #if (defined(VK_EXT_host_image_copy)) PFN_vkTransitionImageLayoutEXT fp_vkTransitionImageLayoutEXT = nullptr; #else - void * fp_vkTransitionImageLayoutEXT{}; + void* fp_vkTransitionImageLayoutEXT{}; #endif #if (defined(VKSC_VERSION_1_0)) PFN_vkGetCommandPoolMemoryConsumption fp_vkGetCommandPoolMemoryConsumption = nullptr; #else - void * fp_vkGetCommandPoolMemoryConsumption{}; + void* fp_vkGetCommandPoolMemoryConsumption{}; #endif #if (defined(VK_KHR_video_queue)) PFN_vkCreateVideoSessionKHR fp_vkCreateVideoSessionKHR = nullptr; #else - void * fp_vkCreateVideoSessionKHR{}; + void* fp_vkCreateVideoSessionKHR{}; #endif #if (defined(VK_KHR_video_queue)) PFN_vkDestroyVideoSessionKHR fp_vkDestroyVideoSessionKHR = nullptr; #else - void * fp_vkDestroyVideoSessionKHR{}; + void* fp_vkDestroyVideoSessionKHR{}; #endif #if (defined(VK_KHR_video_queue)) PFN_vkCreateVideoSessionParametersKHR fp_vkCreateVideoSessionParametersKHR = nullptr; #else - void * fp_vkCreateVideoSessionParametersKHR{}; + void* fp_vkCreateVideoSessionParametersKHR{}; #endif #if (defined(VK_KHR_video_queue)) PFN_vkUpdateVideoSessionParametersKHR fp_vkUpdateVideoSessionParametersKHR = nullptr; #else - void * fp_vkUpdateVideoSessionParametersKHR{}; + void* fp_vkUpdateVideoSessionParametersKHR{}; #endif #if (defined(VK_KHR_video_encode_queue)) PFN_vkGetEncodedVideoSessionParametersKHR fp_vkGetEncodedVideoSessionParametersKHR = nullptr; #else - void * fp_vkGetEncodedVideoSessionParametersKHR{}; + void* fp_vkGetEncodedVideoSessionParametersKHR{}; #endif #if (defined(VK_KHR_video_queue)) PFN_vkDestroyVideoSessionParametersKHR fp_vkDestroyVideoSessionParametersKHR = nullptr; #else - void * fp_vkDestroyVideoSessionParametersKHR{}; + void* fp_vkDestroyVideoSessionParametersKHR{}; #endif #if (defined(VK_KHR_video_queue)) PFN_vkGetVideoSessionMemoryRequirementsKHR fp_vkGetVideoSessionMemoryRequirementsKHR = nullptr; #else - void * fp_vkGetVideoSessionMemoryRequirementsKHR{}; + void* fp_vkGetVideoSessionMemoryRequirementsKHR{}; #endif #if (defined(VK_KHR_video_queue)) PFN_vkBindVideoSessionMemoryKHR fp_vkBindVideoSessionMemoryKHR = nullptr; #else - void * fp_vkBindVideoSessionMemoryKHR{}; + void* fp_vkBindVideoSessionMemoryKHR{}; #endif #if (defined(VK_KHR_video_decode_queue)) PFN_vkCmdDecodeVideoKHR 
fp_vkCmdDecodeVideoKHR = nullptr; #else - void * fp_vkCmdDecodeVideoKHR{}; + void* fp_vkCmdDecodeVideoKHR{}; #endif #if (defined(VK_KHR_video_queue)) PFN_vkCmdBeginVideoCodingKHR fp_vkCmdBeginVideoCodingKHR = nullptr; #else - void * fp_vkCmdBeginVideoCodingKHR{}; + void* fp_vkCmdBeginVideoCodingKHR{}; #endif #if (defined(VK_KHR_video_queue)) PFN_vkCmdControlVideoCodingKHR fp_vkCmdControlVideoCodingKHR = nullptr; #else - void * fp_vkCmdControlVideoCodingKHR{}; + void* fp_vkCmdControlVideoCodingKHR{}; #endif #if (defined(VK_KHR_video_queue)) PFN_vkCmdEndVideoCodingKHR fp_vkCmdEndVideoCodingKHR = nullptr; #else - void * fp_vkCmdEndVideoCodingKHR{}; + void* fp_vkCmdEndVideoCodingKHR{}; #endif #if (defined(VK_KHR_video_encode_queue)) PFN_vkCmdEncodeVideoKHR fp_vkCmdEncodeVideoKHR = nullptr; #else - void * fp_vkCmdEncodeVideoKHR{}; + void* fp_vkCmdEncodeVideoKHR{}; #endif #if (defined(VK_NV_memory_decompression)) PFN_vkCmdDecompressMemoryNV fp_vkCmdDecompressMemoryNV = nullptr; #else - void * fp_vkCmdDecompressMemoryNV{}; + void* fp_vkCmdDecompressMemoryNV{}; #endif #if (defined(VK_NV_memory_decompression)) PFN_vkCmdDecompressMemoryIndirectCountNV fp_vkCmdDecompressMemoryIndirectCountNV = nullptr; #else - void * fp_vkCmdDecompressMemoryIndirectCountNV{}; + void* fp_vkCmdDecompressMemoryIndirectCountNV{}; #endif #if (defined(VK_EXT_descriptor_buffer)) PFN_vkGetDescriptorSetLayoutSizeEXT fp_vkGetDescriptorSetLayoutSizeEXT = nullptr; #else - void * fp_vkGetDescriptorSetLayoutSizeEXT{}; + void* fp_vkGetDescriptorSetLayoutSizeEXT{}; #endif #if (defined(VK_EXT_descriptor_buffer)) PFN_vkGetDescriptorSetLayoutBindingOffsetEXT fp_vkGetDescriptorSetLayoutBindingOffsetEXT = nullptr; #else - void * fp_vkGetDescriptorSetLayoutBindingOffsetEXT{}; + void* fp_vkGetDescriptorSetLayoutBindingOffsetEXT{}; #endif #if (defined(VK_EXT_descriptor_buffer)) PFN_vkGetDescriptorEXT fp_vkGetDescriptorEXT = nullptr; #else - void * fp_vkGetDescriptorEXT{}; + void* fp_vkGetDescriptorEXT{}; #endif #if (defined(VK_EXT_descriptor_buffer)) PFN_vkCmdBindDescriptorBuffersEXT fp_vkCmdBindDescriptorBuffersEXT = nullptr; #else - void * fp_vkCmdBindDescriptorBuffersEXT{}; + void* fp_vkCmdBindDescriptorBuffersEXT{}; #endif #if (defined(VK_EXT_descriptor_buffer)) PFN_vkCmdSetDescriptorBufferOffsetsEXT fp_vkCmdSetDescriptorBufferOffsetsEXT = nullptr; #else - void * fp_vkCmdSetDescriptorBufferOffsetsEXT{}; + void* fp_vkCmdSetDescriptorBufferOffsetsEXT{}; #endif #if (defined(VK_EXT_descriptor_buffer)) PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT fp_vkCmdBindDescriptorBufferEmbeddedSamplersEXT = nullptr; #else - void * fp_vkCmdBindDescriptorBufferEmbeddedSamplersEXT{}; + void* fp_vkCmdBindDescriptorBufferEmbeddedSamplersEXT{}; #endif #if (defined(VK_EXT_descriptor_buffer)) PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT fp_vkGetBufferOpaqueCaptureDescriptorDataEXT = nullptr; #else - void * fp_vkGetBufferOpaqueCaptureDescriptorDataEXT{}; + void* fp_vkGetBufferOpaqueCaptureDescriptorDataEXT{}; #endif #if (defined(VK_EXT_descriptor_buffer)) PFN_vkGetImageOpaqueCaptureDescriptorDataEXT fp_vkGetImageOpaqueCaptureDescriptorDataEXT = nullptr; #else - void * fp_vkGetImageOpaqueCaptureDescriptorDataEXT{}; + void* fp_vkGetImageOpaqueCaptureDescriptorDataEXT{}; #endif #if (defined(VK_EXT_descriptor_buffer)) PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT fp_vkGetImageViewOpaqueCaptureDescriptorDataEXT = nullptr; #else - void * fp_vkGetImageViewOpaqueCaptureDescriptorDataEXT{}; + void* fp_vkGetImageViewOpaqueCaptureDescriptorDataEXT{}; #endif #if 
(defined(VK_EXT_descriptor_buffer)) PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT fp_vkGetSamplerOpaqueCaptureDescriptorDataEXT = nullptr; #else - void * fp_vkGetSamplerOpaqueCaptureDescriptorDataEXT{}; + void* fp_vkGetSamplerOpaqueCaptureDescriptorDataEXT{}; #endif #if (defined(VK_EXT_descriptor_buffer)) - PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT fp_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = nullptr; + PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT + fp_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = nullptr; #else - void * fp_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT{}; + void* fp_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT{}; #endif #if (defined(VK_EXT_pageable_device_local_memory)) PFN_vkSetDeviceMemoryPriorityEXT fp_vkSetDeviceMemoryPriorityEXT = nullptr; #else - void * fp_vkSetDeviceMemoryPriorityEXT{}; + void* fp_vkSetDeviceMemoryPriorityEXT{}; #endif #if (defined(VK_KHR_present_wait)) PFN_vkWaitForPresentKHR fp_vkWaitForPresentKHR = nullptr; #else - void * fp_vkWaitForPresentKHR{}; + void* fp_vkWaitForPresentKHR{}; #endif #if (defined(VK_FUCHSIA_buffer_collection)) PFN_vkCreateBufferCollectionFUCHSIA fp_vkCreateBufferCollectionFUCHSIA = nullptr; #else - void * fp_vkCreateBufferCollectionFUCHSIA{}; + void* fp_vkCreateBufferCollectionFUCHSIA{}; #endif #if (defined(VK_FUCHSIA_buffer_collection)) PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA fp_vkSetBufferCollectionBufferConstraintsFUCHSIA = nullptr; #else - void * fp_vkSetBufferCollectionBufferConstraintsFUCHSIA{}; + void* fp_vkSetBufferCollectionBufferConstraintsFUCHSIA{}; #endif #if (defined(VK_FUCHSIA_buffer_collection)) PFN_vkSetBufferCollectionImageConstraintsFUCHSIA fp_vkSetBufferCollectionImageConstraintsFUCHSIA = nullptr; #else - void * fp_vkSetBufferCollectionImageConstraintsFUCHSIA{}; + void* fp_vkSetBufferCollectionImageConstraintsFUCHSIA{}; #endif #if (defined(VK_FUCHSIA_buffer_collection)) PFN_vkDestroyBufferCollectionFUCHSIA fp_vkDestroyBufferCollectionFUCHSIA = nullptr; #else - void * fp_vkDestroyBufferCollectionFUCHSIA{}; + void* fp_vkDestroyBufferCollectionFUCHSIA{}; #endif #if (defined(VK_FUCHSIA_buffer_collection)) PFN_vkGetBufferCollectionPropertiesFUCHSIA fp_vkGetBufferCollectionPropertiesFUCHSIA = nullptr; #else - void * fp_vkGetBufferCollectionPropertiesFUCHSIA{}; + void* fp_vkGetBufferCollectionPropertiesFUCHSIA{}; #endif #if (defined(VK_NV_cuda_kernel_launch)) PFN_vkCreateCudaModuleNV fp_vkCreateCudaModuleNV = nullptr; #else - void * fp_vkCreateCudaModuleNV{}; + void* fp_vkCreateCudaModuleNV{}; #endif #if (defined(VK_NV_cuda_kernel_launch)) PFN_vkGetCudaModuleCacheNV fp_vkGetCudaModuleCacheNV = nullptr; #else - void * fp_vkGetCudaModuleCacheNV{}; + void* fp_vkGetCudaModuleCacheNV{}; #endif #if (defined(VK_NV_cuda_kernel_launch)) PFN_vkCreateCudaFunctionNV fp_vkCreateCudaFunctionNV = nullptr; #else - void * fp_vkCreateCudaFunctionNV{}; + void* fp_vkCreateCudaFunctionNV{}; #endif #if (defined(VK_NV_cuda_kernel_launch)) PFN_vkDestroyCudaModuleNV fp_vkDestroyCudaModuleNV = nullptr; #else - void * fp_vkDestroyCudaModuleNV{}; + void* fp_vkDestroyCudaModuleNV{}; #endif #if (defined(VK_NV_cuda_kernel_launch)) PFN_vkDestroyCudaFunctionNV fp_vkDestroyCudaFunctionNV = nullptr; #else - void * fp_vkDestroyCudaFunctionNV{}; + void* fp_vkDestroyCudaFunctionNV{}; #endif #if (defined(VK_NV_cuda_kernel_launch)) PFN_vkCmdCudaLaunchKernelNV fp_vkCmdCudaLaunchKernelNV = nullptr; #else - void * fp_vkCmdCudaLaunchKernelNV{}; + void* 
fp_vkCmdCudaLaunchKernelNV{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdBeginRendering fp_vkCmdBeginRendering = nullptr; #else - void * fp_vkCmdBeginRendering{}; + void* fp_vkCmdBeginRendering{}; #endif #if (defined(VK_VERSION_1_3)) PFN_vkCmdEndRendering fp_vkCmdEndRendering = nullptr; #else - void * fp_vkCmdEndRendering{}; + void* fp_vkCmdEndRendering{}; #endif #if (defined(VK_VALVE_descriptor_set_host_mapping)) PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE fp_vkGetDescriptorSetLayoutHostMappingInfoVALVE = nullptr; #else - void * fp_vkGetDescriptorSetLayoutHostMappingInfoVALVE{}; + void* fp_vkGetDescriptorSetLayoutHostMappingInfoVALVE{}; #endif #if (defined(VK_VALVE_descriptor_set_host_mapping)) PFN_vkGetDescriptorSetHostMappingVALVE fp_vkGetDescriptorSetHostMappingVALVE = nullptr; #else - void * fp_vkGetDescriptorSetHostMappingVALVE{}; + void* fp_vkGetDescriptorSetHostMappingVALVE{}; #endif #if (defined(VK_EXT_opacity_micromap)) PFN_vkCreateMicromapEXT fp_vkCreateMicromapEXT = nullptr; #else - void * fp_vkCreateMicromapEXT{}; + void* fp_vkCreateMicromapEXT{}; #endif #if (defined(VK_EXT_opacity_micromap)) PFN_vkCmdBuildMicromapsEXT fp_vkCmdBuildMicromapsEXT = nullptr; #else - void * fp_vkCmdBuildMicromapsEXT{}; + void* fp_vkCmdBuildMicromapsEXT{}; #endif #if (defined(VK_EXT_opacity_micromap)) PFN_vkBuildMicromapsEXT fp_vkBuildMicromapsEXT = nullptr; #else - void * fp_vkBuildMicromapsEXT{}; + void* fp_vkBuildMicromapsEXT{}; #endif #if (defined(VK_EXT_opacity_micromap)) PFN_vkDestroyMicromapEXT fp_vkDestroyMicromapEXT = nullptr; #else - void * fp_vkDestroyMicromapEXT{}; + void* fp_vkDestroyMicromapEXT{}; #endif #if (defined(VK_EXT_opacity_micromap)) PFN_vkCmdCopyMicromapEXT fp_vkCmdCopyMicromapEXT = nullptr; #else - void * fp_vkCmdCopyMicromapEXT{}; + void* fp_vkCmdCopyMicromapEXT{}; #endif #if (defined(VK_EXT_opacity_micromap)) PFN_vkCopyMicromapEXT fp_vkCopyMicromapEXT = nullptr; #else - void * fp_vkCopyMicromapEXT{}; + void* fp_vkCopyMicromapEXT{}; #endif #if (defined(VK_EXT_opacity_micromap)) PFN_vkCmdCopyMicromapToMemoryEXT fp_vkCmdCopyMicromapToMemoryEXT = nullptr; #else - void * fp_vkCmdCopyMicromapToMemoryEXT{}; + void* fp_vkCmdCopyMicromapToMemoryEXT{}; #endif #if (defined(VK_EXT_opacity_micromap)) PFN_vkCopyMicromapToMemoryEXT fp_vkCopyMicromapToMemoryEXT = nullptr; #else - void * fp_vkCopyMicromapToMemoryEXT{}; + void* fp_vkCopyMicromapToMemoryEXT{}; #endif #if (defined(VK_EXT_opacity_micromap)) PFN_vkCmdCopyMemoryToMicromapEXT fp_vkCmdCopyMemoryToMicromapEXT = nullptr; #else - void * fp_vkCmdCopyMemoryToMicromapEXT{}; + void* fp_vkCmdCopyMemoryToMicromapEXT{}; #endif #if (defined(VK_EXT_opacity_micromap)) PFN_vkCopyMemoryToMicromapEXT fp_vkCopyMemoryToMicromapEXT = nullptr; #else - void * fp_vkCopyMemoryToMicromapEXT{}; + void* fp_vkCopyMemoryToMicromapEXT{}; #endif #if (defined(VK_EXT_opacity_micromap)) PFN_vkCmdWriteMicromapsPropertiesEXT fp_vkCmdWriteMicromapsPropertiesEXT = nullptr; #else - void * fp_vkCmdWriteMicromapsPropertiesEXT{}; + void* fp_vkCmdWriteMicromapsPropertiesEXT{}; #endif #if (defined(VK_EXT_opacity_micromap)) PFN_vkWriteMicromapsPropertiesEXT fp_vkWriteMicromapsPropertiesEXT = nullptr; #else - void * fp_vkWriteMicromapsPropertiesEXT{}; + void* fp_vkWriteMicromapsPropertiesEXT{}; #endif #if (defined(VK_EXT_opacity_micromap)) PFN_vkGetDeviceMicromapCompatibilityEXT fp_vkGetDeviceMicromapCompatibilityEXT = nullptr; #else - void * fp_vkGetDeviceMicromapCompatibilityEXT{}; + void* fp_vkGetDeviceMicromapCompatibilityEXT{}; #endif #if 
(defined(VK_EXT_opacity_micromap)) PFN_vkGetMicromapBuildSizesEXT fp_vkGetMicromapBuildSizesEXT = nullptr; #else - void * fp_vkGetMicromapBuildSizesEXT{}; + void* fp_vkGetMicromapBuildSizesEXT{}; #endif #if (defined(VK_EXT_shader_module_identifier)) PFN_vkGetShaderModuleIdentifierEXT fp_vkGetShaderModuleIdentifierEXT = nullptr; #else - void * fp_vkGetShaderModuleIdentifierEXT{}; + void* fp_vkGetShaderModuleIdentifierEXT{}; #endif #if (defined(VK_EXT_shader_module_identifier)) PFN_vkGetShaderModuleCreateInfoIdentifierEXT fp_vkGetShaderModuleCreateInfoIdentifierEXT = nullptr; #else - void * fp_vkGetShaderModuleCreateInfoIdentifierEXT{}; + void* fp_vkGetShaderModuleCreateInfoIdentifierEXT{}; #endif #if (defined(VK_KHR_maintenance5)) PFN_vkGetImageSubresourceLayout2KHR fp_vkGetImageSubresourceLayout2KHR = nullptr; #else - void * fp_vkGetImageSubresourceLayout2KHR{}; + void* fp_vkGetImageSubresourceLayout2KHR{}; #endif #if (defined(VK_EXT_pipeline_properties)) PFN_vkGetPipelinePropertiesEXT fp_vkGetPipelinePropertiesEXT = nullptr; #else - void * fp_vkGetPipelinePropertiesEXT{}; + void* fp_vkGetPipelinePropertiesEXT{}; #endif #if (defined(VK_EXT_metal_objects)) PFN_vkExportMetalObjectsEXT fp_vkExportMetalObjectsEXT = nullptr; #else - void * fp_vkExportMetalObjectsEXT{}; + void* fp_vkExportMetalObjectsEXT{}; #endif #if (defined(VK_QCOM_tile_properties)) PFN_vkGetFramebufferTilePropertiesQCOM fp_vkGetFramebufferTilePropertiesQCOM = nullptr; #else - void * fp_vkGetFramebufferTilePropertiesQCOM{}; + void* fp_vkGetFramebufferTilePropertiesQCOM{}; #endif #if (defined(VK_QCOM_tile_properties)) PFN_vkGetDynamicRenderingTilePropertiesQCOM fp_vkGetDynamicRenderingTilePropertiesQCOM = nullptr; #else - void * fp_vkGetDynamicRenderingTilePropertiesQCOM{}; + void* fp_vkGetDynamicRenderingTilePropertiesQCOM{}; #endif #if (defined(VK_NV_optical_flow)) PFN_vkCreateOpticalFlowSessionNV fp_vkCreateOpticalFlowSessionNV = nullptr; #else - void * fp_vkCreateOpticalFlowSessionNV{}; + void* fp_vkCreateOpticalFlowSessionNV{}; #endif #if (defined(VK_NV_optical_flow)) PFN_vkDestroyOpticalFlowSessionNV fp_vkDestroyOpticalFlowSessionNV = nullptr; #else - void * fp_vkDestroyOpticalFlowSessionNV{}; + void* fp_vkDestroyOpticalFlowSessionNV{}; #endif #if (defined(VK_NV_optical_flow)) PFN_vkBindOpticalFlowSessionImageNV fp_vkBindOpticalFlowSessionImageNV = nullptr; #else - void * fp_vkBindOpticalFlowSessionImageNV{}; + void* fp_vkBindOpticalFlowSessionImageNV{}; #endif #if (defined(VK_NV_optical_flow)) PFN_vkCmdOpticalFlowExecuteNV fp_vkCmdOpticalFlowExecuteNV = nullptr; #else - void * fp_vkCmdOpticalFlowExecuteNV{}; + void* fp_vkCmdOpticalFlowExecuteNV{}; #endif #if (defined(VK_EXT_device_fault)) PFN_vkGetDeviceFaultInfoEXT fp_vkGetDeviceFaultInfoEXT = nullptr; #else - void * fp_vkGetDeviceFaultInfoEXT{}; + void* fp_vkGetDeviceFaultInfoEXT{}; #endif #if (defined(VK_EXT_depth_bias_control)) PFN_vkCmdSetDepthBias2EXT fp_vkCmdSetDepthBias2EXT = nullptr; #else - void * fp_vkCmdSetDepthBias2EXT{}; + void* fp_vkCmdSetDepthBias2EXT{}; #endif #if (defined(VK_EXT_swapchain_maintenance1)) PFN_vkReleaseSwapchainImagesEXT fp_vkReleaseSwapchainImagesEXT = nullptr; #else - void * fp_vkReleaseSwapchainImagesEXT{}; + void* fp_vkReleaseSwapchainImagesEXT{}; #endif #if (defined(VK_KHR_maintenance5)) PFN_vkGetDeviceImageSubresourceLayoutKHR fp_vkGetDeviceImageSubresourceLayoutKHR = nullptr; #else - void * fp_vkGetDeviceImageSubresourceLayoutKHR{}; + void* fp_vkGetDeviceImageSubresourceLayoutKHR{}; #endif #if (defined(VK_KHR_map_memory2)) 
PFN_vkMapMemory2KHR fp_vkMapMemory2KHR = nullptr; #else - void * fp_vkMapMemory2KHR{}; + void* fp_vkMapMemory2KHR{}; #endif #if (defined(VK_KHR_map_memory2)) PFN_vkUnmapMemory2KHR fp_vkUnmapMemory2KHR = nullptr; #else - void * fp_vkUnmapMemory2KHR{}; + void* fp_vkUnmapMemory2KHR{}; #endif #if (defined(VK_EXT_shader_object)) PFN_vkCreateShadersEXT fp_vkCreateShadersEXT = nullptr; #else - void * fp_vkCreateShadersEXT{}; + void* fp_vkCreateShadersEXT{}; #endif #if (defined(VK_EXT_shader_object)) PFN_vkDestroyShaderEXT fp_vkDestroyShaderEXT = nullptr; #else - void * fp_vkDestroyShaderEXT{}; + void* fp_vkDestroyShaderEXT{}; #endif #if (defined(VK_EXT_shader_object)) PFN_vkGetShaderBinaryDataEXT fp_vkGetShaderBinaryDataEXT = nullptr; #else - void * fp_vkGetShaderBinaryDataEXT{}; + void* fp_vkGetShaderBinaryDataEXT{}; #endif #if (defined(VK_EXT_shader_object)) PFN_vkCmdBindShadersEXT fp_vkCmdBindShadersEXT = nullptr; #else - void * fp_vkCmdBindShadersEXT{}; + void* fp_vkCmdBindShadersEXT{}; #endif #if (defined(VK_QNX_external_memory_screen_buffer)) PFN_vkGetScreenBufferPropertiesQNX fp_vkGetScreenBufferPropertiesQNX = nullptr; #else - void * fp_vkGetScreenBufferPropertiesQNX{}; + void* fp_vkGetScreenBufferPropertiesQNX{}; #endif #if (defined(VK_AMDX_shader_enqueue)) PFN_vkGetExecutionGraphPipelineScratchSizeAMDX fp_vkGetExecutionGraphPipelineScratchSizeAMDX = nullptr; #else - void * fp_vkGetExecutionGraphPipelineScratchSizeAMDX{}; + void* fp_vkGetExecutionGraphPipelineScratchSizeAMDX{}; #endif #if (defined(VK_AMDX_shader_enqueue)) PFN_vkGetExecutionGraphPipelineNodeIndexAMDX fp_vkGetExecutionGraphPipelineNodeIndexAMDX = nullptr; #else - void * fp_vkGetExecutionGraphPipelineNodeIndexAMDX{}; + void* fp_vkGetExecutionGraphPipelineNodeIndexAMDX{}; #endif #if (defined(VK_AMDX_shader_enqueue)) PFN_vkCreateExecutionGraphPipelinesAMDX fp_vkCreateExecutionGraphPipelinesAMDX = nullptr; #else - void * fp_vkCreateExecutionGraphPipelinesAMDX{}; + void* fp_vkCreateExecutionGraphPipelinesAMDX{}; #endif #if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 PFN_vkCmdInitializeGraphScratchMemoryAMDX fp_vkCmdInitializeGraphScratchMemoryAMDX = nullptr; #else - void * fp_vkCmdInitializeGraphScratchMemoryAMDX{}; + void* fp_vkCmdInitializeGraphScratchMemoryAMDX{}; #endif #if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 PFN_vkCmdDispatchGraphAMDX fp_vkCmdDispatchGraphAMDX = nullptr; #else - void * fp_vkCmdDispatchGraphAMDX{}; + void* fp_vkCmdDispatchGraphAMDX{}; #endif #if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 PFN_vkCmdDispatchGraphIndirectAMDX fp_vkCmdDispatchGraphIndirectAMDX = nullptr; #else - void * fp_vkCmdDispatchGraphIndirectAMDX{}; + void* fp_vkCmdDispatchGraphIndirectAMDX{}; #endif #if ((defined(VK_AMDX_shader_enqueue))) && VK_HEADER_VERSION >= 298 PFN_vkCmdDispatchGraphIndirectCountAMDX fp_vkCmdDispatchGraphIndirectCountAMDX = nullptr; #else - void * fp_vkCmdDispatchGraphIndirectCountAMDX{}; + void* fp_vkCmdDispatchGraphIndirectCountAMDX{}; #endif #if (defined(VK_KHR_maintenance6)) PFN_vkCmdBindDescriptorSets2KHR fp_vkCmdBindDescriptorSets2KHR = nullptr; #else - void * fp_vkCmdBindDescriptorSets2KHR{}; + void* fp_vkCmdBindDescriptorSets2KHR{}; #endif #if (defined(VK_KHR_maintenance6)) PFN_vkCmdPushConstants2KHR fp_vkCmdPushConstants2KHR = nullptr; #else - void * fp_vkCmdPushConstants2KHR{}; + void* fp_vkCmdPushConstants2KHR{}; #endif #if (defined(VK_KHR_maintenance6)) PFN_vkCmdPushDescriptorSet2KHR fp_vkCmdPushDescriptorSet2KHR = nullptr; #else - void 
* fp_vkCmdPushDescriptorSet2KHR{}; + void* fp_vkCmdPushDescriptorSet2KHR{}; #endif #if (defined(VK_KHR_maintenance6)) PFN_vkCmdPushDescriptorSetWithTemplate2KHR fp_vkCmdPushDescriptorSetWithTemplate2KHR = nullptr; #else - void * fp_vkCmdPushDescriptorSetWithTemplate2KHR{}; + void* fp_vkCmdPushDescriptorSetWithTemplate2KHR{}; #endif #if (defined(VK_KHR_maintenance6)) PFN_vkCmdSetDescriptorBufferOffsets2EXT fp_vkCmdSetDescriptorBufferOffsets2EXT = nullptr; #else - void * fp_vkCmdSetDescriptorBufferOffsets2EXT{}; + void* fp_vkCmdSetDescriptorBufferOffsets2EXT{}; #endif #if (defined(VK_KHR_maintenance6)) PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT fp_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = nullptr; #else - void * fp_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT{}; + void* fp_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT{}; #endif #if (defined(VK_NV_low_latency2)) PFN_vkSetLatencySleepModeNV fp_vkSetLatencySleepModeNV = nullptr; #else - void * fp_vkSetLatencySleepModeNV{}; + void* fp_vkSetLatencySleepModeNV{}; #endif #if (defined(VK_NV_low_latency2)) PFN_vkLatencySleepNV fp_vkLatencySleepNV = nullptr; #else - void * fp_vkLatencySleepNV{}; + void* fp_vkLatencySleepNV{}; #endif #if (defined(VK_NV_low_latency2)) PFN_vkSetLatencyMarkerNV fp_vkSetLatencyMarkerNV = nullptr; #else - void * fp_vkSetLatencyMarkerNV{}; + void* fp_vkSetLatencyMarkerNV{}; #endif #if ((defined(VK_NV_low_latency2))) && VK_HEADER_VERSION >= 271 PFN_vkGetLatencyTimingsNV fp_vkGetLatencyTimingsNV = nullptr; #else - void * fp_vkGetLatencyTimingsNV{}; + void* fp_vkGetLatencyTimingsNV{}; #endif #if (defined(VK_NV_low_latency2)) PFN_vkQueueNotifyOutOfBandNV fp_vkQueueNotifyOutOfBandNV = nullptr; #else - void * fp_vkQueueNotifyOutOfBandNV{}; + void* fp_vkQueueNotifyOutOfBandNV{}; #endif #if (defined(VK_KHR_dynamic_rendering_local_read)) PFN_vkCmdSetRenderingAttachmentLocationsKHR fp_vkCmdSetRenderingAttachmentLocationsKHR = nullptr; #else - void * fp_vkCmdSetRenderingAttachmentLocationsKHR{}; + void* fp_vkCmdSetRenderingAttachmentLocationsKHR{}; #endif #if (defined(VK_KHR_dynamic_rendering_local_read)) PFN_vkCmdSetRenderingInputAttachmentIndicesKHR fp_vkCmdSetRenderingInputAttachmentIndicesKHR = nullptr; #else - void * fp_vkCmdSetRenderingInputAttachmentIndicesKHR{}; + void* fp_vkCmdSetRenderingInputAttachmentIndicesKHR{}; #endif #if (defined(VK_EXT_shader_object)) || (defined(VK_EXT_depth_clamp_control)) PFN_vkCmdSetDepthClampRangeEXT fp_vkCmdSetDepthClampRangeEXT = nullptr; #else - void * fp_vkCmdSetDepthClampRangeEXT{}; + void* fp_vkCmdSetDepthClampRangeEXT{}; #endif #if (defined(VK_EXT_host_query_reset)) PFN_vkResetQueryPoolEXT fp_vkResetQueryPoolEXT = nullptr; #else - void * fp_vkResetQueryPoolEXT{}; + void* fp_vkResetQueryPoolEXT{}; #endif #if (defined(VK_KHR_maintenance1)) PFN_vkTrimCommandPoolKHR fp_vkTrimCommandPoolKHR = nullptr; #else - void * fp_vkTrimCommandPoolKHR{}; + void* fp_vkTrimCommandPoolKHR{}; #endif #if (defined(VK_KHR_device_group)) PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR fp_vkGetDeviceGroupPeerMemoryFeaturesKHR = nullptr; #else - void * fp_vkGetDeviceGroupPeerMemoryFeaturesKHR{}; + void* fp_vkGetDeviceGroupPeerMemoryFeaturesKHR{}; #endif #if (defined(VK_KHR_bind_memory2)) PFN_vkBindBufferMemory2KHR fp_vkBindBufferMemory2KHR = nullptr; #else - void * fp_vkBindBufferMemory2KHR{}; + void* fp_vkBindBufferMemory2KHR{}; #endif #if (defined(VK_KHR_bind_memory2)) PFN_vkBindImageMemory2KHR fp_vkBindImageMemory2KHR = nullptr; #else - void * fp_vkBindImageMemory2KHR{}; + void* fp_vkBindImageMemory2KHR{}; 
#endif #if (defined(VK_KHR_device_group)) PFN_vkCmdSetDeviceMaskKHR fp_vkCmdSetDeviceMaskKHR = nullptr; #else - void * fp_vkCmdSetDeviceMaskKHR{}; + void* fp_vkCmdSetDeviceMaskKHR{}; #endif #if (defined(VK_KHR_device_group)) PFN_vkCmdDispatchBaseKHR fp_vkCmdDispatchBaseKHR = nullptr; #else - void * fp_vkCmdDispatchBaseKHR{}; + void* fp_vkCmdDispatchBaseKHR{}; #endif #if (defined(VK_KHR_descriptor_update_template)) PFN_vkCreateDescriptorUpdateTemplateKHR fp_vkCreateDescriptorUpdateTemplateKHR = nullptr; #else - void * fp_vkCreateDescriptorUpdateTemplateKHR{}; + void* fp_vkCreateDescriptorUpdateTemplateKHR{}; #endif #if (defined(VK_KHR_descriptor_update_template)) PFN_vkDestroyDescriptorUpdateTemplateKHR fp_vkDestroyDescriptorUpdateTemplateKHR = nullptr; #else - void * fp_vkDestroyDescriptorUpdateTemplateKHR{}; + void* fp_vkDestroyDescriptorUpdateTemplateKHR{}; #endif #if (defined(VK_KHR_descriptor_update_template)) PFN_vkUpdateDescriptorSetWithTemplateKHR fp_vkUpdateDescriptorSetWithTemplateKHR = nullptr; #else - void * fp_vkUpdateDescriptorSetWithTemplateKHR{}; + void* fp_vkUpdateDescriptorSetWithTemplateKHR{}; #endif #if (defined(VK_KHR_get_memory_requirements2)) PFN_vkGetBufferMemoryRequirements2KHR fp_vkGetBufferMemoryRequirements2KHR = nullptr; #else - void * fp_vkGetBufferMemoryRequirements2KHR{}; + void* fp_vkGetBufferMemoryRequirements2KHR{}; #endif #if (defined(VK_KHR_get_memory_requirements2)) PFN_vkGetImageMemoryRequirements2KHR fp_vkGetImageMemoryRequirements2KHR = nullptr; #else - void * fp_vkGetImageMemoryRequirements2KHR{}; + void* fp_vkGetImageMemoryRequirements2KHR{}; #endif #if (defined(VK_KHR_get_memory_requirements2)) PFN_vkGetImageSparseMemoryRequirements2KHR fp_vkGetImageSparseMemoryRequirements2KHR = nullptr; #else - void * fp_vkGetImageSparseMemoryRequirements2KHR{}; + void* fp_vkGetImageSparseMemoryRequirements2KHR{}; #endif #if (defined(VK_KHR_maintenance4)) PFN_vkGetDeviceBufferMemoryRequirementsKHR fp_vkGetDeviceBufferMemoryRequirementsKHR = nullptr; #else - void * fp_vkGetDeviceBufferMemoryRequirementsKHR{}; + void* fp_vkGetDeviceBufferMemoryRequirementsKHR{}; #endif #if (defined(VK_KHR_maintenance4)) PFN_vkGetDeviceImageMemoryRequirementsKHR fp_vkGetDeviceImageMemoryRequirementsKHR = nullptr; #else - void * fp_vkGetDeviceImageMemoryRequirementsKHR{}; + void* fp_vkGetDeviceImageMemoryRequirementsKHR{}; #endif #if (defined(VK_KHR_maintenance4)) PFN_vkGetDeviceImageSparseMemoryRequirementsKHR fp_vkGetDeviceImageSparseMemoryRequirementsKHR = nullptr; #else - void * fp_vkGetDeviceImageSparseMemoryRequirementsKHR{}; + void* fp_vkGetDeviceImageSparseMemoryRequirementsKHR{}; #endif #if (defined(VK_KHR_sampler_ycbcr_conversion)) PFN_vkCreateSamplerYcbcrConversionKHR fp_vkCreateSamplerYcbcrConversionKHR = nullptr; #else - void * fp_vkCreateSamplerYcbcrConversionKHR{}; + void* fp_vkCreateSamplerYcbcrConversionKHR{}; #endif #if (defined(VK_KHR_sampler_ycbcr_conversion)) PFN_vkDestroySamplerYcbcrConversionKHR fp_vkDestroySamplerYcbcrConversionKHR = nullptr; #else - void * fp_vkDestroySamplerYcbcrConversionKHR{}; + void* fp_vkDestroySamplerYcbcrConversionKHR{}; #endif #if (defined(VK_KHR_maintenance3)) PFN_vkGetDescriptorSetLayoutSupportKHR fp_vkGetDescriptorSetLayoutSupportKHR = nullptr; #else - void * fp_vkGetDescriptorSetLayoutSupportKHR{}; + void* fp_vkGetDescriptorSetLayoutSupportKHR{}; #endif #if (defined(VK_EXT_calibrated_timestamps)) PFN_vkGetCalibratedTimestampsEXT fp_vkGetCalibratedTimestampsEXT = nullptr; #else - void * fp_vkGetCalibratedTimestampsEXT{}; + void* 
fp_vkGetCalibratedTimestampsEXT{}; #endif #if (defined(VK_KHR_create_renderpass2)) PFN_vkCreateRenderPass2KHR fp_vkCreateRenderPass2KHR = nullptr; #else - void * fp_vkCreateRenderPass2KHR{}; + void* fp_vkCreateRenderPass2KHR{}; #endif #if (defined(VK_KHR_create_renderpass2)) PFN_vkCmdBeginRenderPass2KHR fp_vkCmdBeginRenderPass2KHR = nullptr; #else - void * fp_vkCmdBeginRenderPass2KHR{}; + void* fp_vkCmdBeginRenderPass2KHR{}; #endif #if (defined(VK_KHR_create_renderpass2)) PFN_vkCmdNextSubpass2KHR fp_vkCmdNextSubpass2KHR = nullptr; #else - void * fp_vkCmdNextSubpass2KHR{}; + void* fp_vkCmdNextSubpass2KHR{}; #endif #if (defined(VK_KHR_create_renderpass2)) PFN_vkCmdEndRenderPass2KHR fp_vkCmdEndRenderPass2KHR = nullptr; #else - void * fp_vkCmdEndRenderPass2KHR{}; + void* fp_vkCmdEndRenderPass2KHR{}; #endif #if (defined(VK_KHR_timeline_semaphore)) PFN_vkGetSemaphoreCounterValueKHR fp_vkGetSemaphoreCounterValueKHR = nullptr; #else - void * fp_vkGetSemaphoreCounterValueKHR{}; + void* fp_vkGetSemaphoreCounterValueKHR{}; #endif #if (defined(VK_KHR_timeline_semaphore)) PFN_vkWaitSemaphoresKHR fp_vkWaitSemaphoresKHR = nullptr; #else - void * fp_vkWaitSemaphoresKHR{}; + void* fp_vkWaitSemaphoresKHR{}; #endif #if (defined(VK_KHR_timeline_semaphore)) PFN_vkSignalSemaphoreKHR fp_vkSignalSemaphoreKHR = nullptr; #else - void * fp_vkSignalSemaphoreKHR{}; + void* fp_vkSignalSemaphoreKHR{}; #endif #if (defined(VK_AMD_draw_indirect_count)) PFN_vkCmdDrawIndirectCountAMD fp_vkCmdDrawIndirectCountAMD = nullptr; #else - void * fp_vkCmdDrawIndirectCountAMD{}; + void* fp_vkCmdDrawIndirectCountAMD{}; #endif #if (defined(VK_AMD_draw_indirect_count)) PFN_vkCmdDrawIndexedIndirectCountAMD fp_vkCmdDrawIndexedIndirectCountAMD = nullptr; #else - void * fp_vkCmdDrawIndexedIndirectCountAMD{}; + void* fp_vkCmdDrawIndexedIndirectCountAMD{}; #endif #if (defined(VK_NV_ray_tracing)) PFN_vkGetRayTracingShaderGroupHandlesNV fp_vkGetRayTracingShaderGroupHandlesNV = nullptr; #else - void * fp_vkGetRayTracingShaderGroupHandlesNV{}; + void* fp_vkGetRayTracingShaderGroupHandlesNV{}; #endif #if (defined(VK_KHR_buffer_device_address)) PFN_vkGetBufferOpaqueCaptureAddressKHR fp_vkGetBufferOpaqueCaptureAddressKHR = nullptr; #else - void * fp_vkGetBufferOpaqueCaptureAddressKHR{}; + void* fp_vkGetBufferOpaqueCaptureAddressKHR{}; #endif #if (defined(VK_EXT_buffer_device_address)) PFN_vkGetBufferDeviceAddressEXT fp_vkGetBufferDeviceAddressEXT = nullptr; #else - void * fp_vkGetBufferDeviceAddressEXT{}; + void* fp_vkGetBufferDeviceAddressEXT{}; #endif #if (defined(VK_KHR_buffer_device_address)) PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR fp_vkGetDeviceMemoryOpaqueCaptureAddressKHR = nullptr; #else - void * fp_vkGetDeviceMemoryOpaqueCaptureAddressKHR{}; + void* fp_vkGetDeviceMemoryOpaqueCaptureAddressKHR{}; #endif #if (defined(VK_EXT_line_rasterization)) PFN_vkCmdSetLineStippleEXT fp_vkCmdSetLineStippleEXT = nullptr; #else - void * fp_vkCmdSetLineStippleEXT{}; + void* fp_vkCmdSetLineStippleEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetCullModeEXT fp_vkCmdSetCullModeEXT = nullptr; #else - void * fp_vkCmdSetCullModeEXT{}; + void* fp_vkCmdSetCullModeEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetFrontFaceEXT fp_vkCmdSetFrontFaceEXT = nullptr; #else - void * fp_vkCmdSetFrontFaceEXT{}; + void* fp_vkCmdSetFrontFaceEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) 
PFN_vkCmdSetPrimitiveTopologyEXT fp_vkCmdSetPrimitiveTopologyEXT = nullptr; #else - void * fp_vkCmdSetPrimitiveTopologyEXT{}; + void* fp_vkCmdSetPrimitiveTopologyEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetViewportWithCountEXT fp_vkCmdSetViewportWithCountEXT = nullptr; #else - void * fp_vkCmdSetViewportWithCountEXT{}; + void* fp_vkCmdSetViewportWithCountEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetScissorWithCountEXT fp_vkCmdSetScissorWithCountEXT = nullptr; #else - void * fp_vkCmdSetScissorWithCountEXT{}; + void* fp_vkCmdSetScissorWithCountEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) PFN_vkCmdBindVertexBuffers2EXT fp_vkCmdBindVertexBuffers2EXT = nullptr; #else - void * fp_vkCmdBindVertexBuffers2EXT{}; + void* fp_vkCmdBindVertexBuffers2EXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetDepthTestEnableEXT fp_vkCmdSetDepthTestEnableEXT = nullptr; #else - void * fp_vkCmdSetDepthTestEnableEXT{}; + void* fp_vkCmdSetDepthTestEnableEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetDepthWriteEnableEXT fp_vkCmdSetDepthWriteEnableEXT = nullptr; #else - void * fp_vkCmdSetDepthWriteEnableEXT{}; + void* fp_vkCmdSetDepthWriteEnableEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetDepthCompareOpEXT fp_vkCmdSetDepthCompareOpEXT = nullptr; #else - void * fp_vkCmdSetDepthCompareOpEXT{}; + void* fp_vkCmdSetDepthCompareOpEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetDepthBoundsTestEnableEXT fp_vkCmdSetDepthBoundsTestEnableEXT = nullptr; #else - void * fp_vkCmdSetDepthBoundsTestEnableEXT{}; + void* fp_vkCmdSetDepthBoundsTestEnableEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetStencilTestEnableEXT fp_vkCmdSetStencilTestEnableEXT = nullptr; #else - void * fp_vkCmdSetStencilTestEnableEXT{}; + void* fp_vkCmdSetStencilTestEnableEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetStencilOpEXT fp_vkCmdSetStencilOpEXT = nullptr; #else - void * fp_vkCmdSetStencilOpEXT{}; + void* fp_vkCmdSetStencilOpEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetRasterizerDiscardEnableEXT fp_vkCmdSetRasterizerDiscardEnableEXT = nullptr; #else - void * fp_vkCmdSetRasterizerDiscardEnableEXT{}; + void* fp_vkCmdSetRasterizerDiscardEnableEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetDepthBiasEnableEXT fp_vkCmdSetDepthBiasEnableEXT = nullptr; #else - void * fp_vkCmdSetDepthBiasEnableEXT{}; + void* fp_vkCmdSetDepthBiasEnableEXT{}; #endif #if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) PFN_vkCmdSetPrimitiveRestartEnableEXT fp_vkCmdSetPrimitiveRestartEnableEXT = nullptr; #else - void * fp_vkCmdSetPrimitiveRestartEnableEXT{}; + void* fp_vkCmdSetPrimitiveRestartEnableEXT{}; #endif #if (defined(VK_EXT_private_data)) PFN_vkCreatePrivateDataSlotEXT fp_vkCreatePrivateDataSlotEXT = nullptr; #else - void * fp_vkCreatePrivateDataSlotEXT{}; + void* fp_vkCreatePrivateDataSlotEXT{}; #endif #if (defined(VK_EXT_private_data)) PFN_vkDestroyPrivateDataSlotEXT fp_vkDestroyPrivateDataSlotEXT = nullptr; #else - 
void * fp_vkDestroyPrivateDataSlotEXT{}; + void* fp_vkDestroyPrivateDataSlotEXT{}; #endif #if (defined(VK_EXT_private_data)) PFN_vkSetPrivateDataEXT fp_vkSetPrivateDataEXT = nullptr; #else - void * fp_vkSetPrivateDataEXT{}; + void* fp_vkSetPrivateDataEXT{}; #endif #if (defined(VK_EXT_private_data)) PFN_vkGetPrivateDataEXT fp_vkGetPrivateDataEXT = nullptr; #else - void * fp_vkGetPrivateDataEXT{}; + void* fp_vkGetPrivateDataEXT{}; #endif #if (defined(VK_KHR_copy_commands2)) PFN_vkCmdCopyBuffer2KHR fp_vkCmdCopyBuffer2KHR = nullptr; #else - void * fp_vkCmdCopyBuffer2KHR{}; + void* fp_vkCmdCopyBuffer2KHR{}; #endif #if (defined(VK_KHR_copy_commands2)) PFN_vkCmdCopyImage2KHR fp_vkCmdCopyImage2KHR = nullptr; #else - void * fp_vkCmdCopyImage2KHR{}; + void* fp_vkCmdCopyImage2KHR{}; #endif #if (defined(VK_KHR_copy_commands2)) PFN_vkCmdBlitImage2KHR fp_vkCmdBlitImage2KHR = nullptr; #else - void * fp_vkCmdBlitImage2KHR{}; + void* fp_vkCmdBlitImage2KHR{}; #endif #if (defined(VK_KHR_copy_commands2)) PFN_vkCmdCopyBufferToImage2KHR fp_vkCmdCopyBufferToImage2KHR = nullptr; #else - void * fp_vkCmdCopyBufferToImage2KHR{}; + void* fp_vkCmdCopyBufferToImage2KHR{}; #endif #if (defined(VK_KHR_copy_commands2)) PFN_vkCmdCopyImageToBuffer2KHR fp_vkCmdCopyImageToBuffer2KHR = nullptr; #else - void * fp_vkCmdCopyImageToBuffer2KHR{}; + void* fp_vkCmdCopyImageToBuffer2KHR{}; #endif #if (defined(VK_KHR_copy_commands2)) PFN_vkCmdResolveImage2KHR fp_vkCmdResolveImage2KHR = nullptr; #else - void * fp_vkCmdResolveImage2KHR{}; + void* fp_vkCmdResolveImage2KHR{}; #endif #if (defined(VK_KHR_synchronization2)) PFN_vkCmdSetEvent2KHR fp_vkCmdSetEvent2KHR = nullptr; #else - void * fp_vkCmdSetEvent2KHR{}; + void* fp_vkCmdSetEvent2KHR{}; #endif #if (defined(VK_KHR_synchronization2)) PFN_vkCmdResetEvent2KHR fp_vkCmdResetEvent2KHR = nullptr; #else - void * fp_vkCmdResetEvent2KHR{}; + void* fp_vkCmdResetEvent2KHR{}; #endif #if (defined(VK_KHR_synchronization2)) PFN_vkCmdWaitEvents2KHR fp_vkCmdWaitEvents2KHR = nullptr; #else - void * fp_vkCmdWaitEvents2KHR{}; + void* fp_vkCmdWaitEvents2KHR{}; #endif #if (defined(VK_KHR_synchronization2)) PFN_vkCmdPipelineBarrier2KHR fp_vkCmdPipelineBarrier2KHR = nullptr; #else - void * fp_vkCmdPipelineBarrier2KHR{}; + void* fp_vkCmdPipelineBarrier2KHR{}; #endif #if (defined(VK_KHR_synchronization2)) PFN_vkQueueSubmit2KHR fp_vkQueueSubmit2KHR = nullptr; #else - void * fp_vkQueueSubmit2KHR{}; + void* fp_vkQueueSubmit2KHR{}; #endif #if (defined(VK_KHR_synchronization2)) PFN_vkCmdWriteTimestamp2KHR fp_vkCmdWriteTimestamp2KHR = nullptr; #else - void * fp_vkCmdWriteTimestamp2KHR{}; + void* fp_vkCmdWriteTimestamp2KHR{}; #endif #if (defined(VK_KHR_dynamic_rendering)) PFN_vkCmdBeginRenderingKHR fp_vkCmdBeginRenderingKHR = nullptr; #else - void * fp_vkCmdBeginRenderingKHR{}; + void* fp_vkCmdBeginRenderingKHR{}; #endif #if (defined(VK_KHR_dynamic_rendering)) PFN_vkCmdEndRenderingKHR fp_vkCmdEndRenderingKHR = nullptr; #else - void * fp_vkCmdEndRenderingKHR{}; + void* fp_vkCmdEndRenderingKHR{}; #endif #if (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) PFN_vkGetImageSubresourceLayout2EXT fp_vkGetImageSubresourceLayout2EXT = nullptr; #else - void * fp_vkGetImageSubresourceLayout2EXT{}; + void* fp_vkGetImageSubresourceLayout2EXT{}; #endif - bool is_populated() const { return populated; } - VkDevice device = VK_NULL_HANDLE; + bool is_populated() const { return populated; } + VkDevice device = VK_NULL_HANDLE; PFN_vkGetDeviceProcAddr fp_vkGetDeviceProcAddr; -private: - bool populated = 
false; + + private: + bool populated = false; }; } // namespace vkb \ No newline at end of file diff --git a/test/test_apps/multisample-depth/app.cpp b/test/test_apps/multisample-depth/app.cpp index bd5f87fbbd..1701a7fc69 100644 --- a/test/test_apps/multisample-depth/app.cpp +++ b/test/test_apps/multisample-depth/app.cpp @@ -445,7 +445,7 @@ bool App::frame(const int frame_num) { VkImageMemoryBarrier image_barriers[2]; - image_barriers[0] = {}; + image_barriers[0] = {}; image_barriers[0].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; image_barriers[0].image = render_targets[image_index]; image_barriers[0].oldLayout = VK_IMAGE_LAYOUT_UNDEFINED; @@ -455,7 +455,7 @@ bool App::frame(const int frame_num) image_barriers[0].subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS; image_barriers[0].srcAccessMask = VK_ACCESS_NONE; image_barriers[0].dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; - image_barriers[1] = {}; + image_barriers[1] = {}; image_barriers[1].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; image_barriers[1].image = depth_images[image_index]; image_barriers[1].oldLayout = VK_IMAGE_LAYOUT_UNDEFINED; @@ -464,10 +464,13 @@ bool App::frame(const int frame_num) image_barriers[1].subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS; image_barriers[1].subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS; image_barriers[1].srcAccessMask = VK_ACCESS_NONE; - image_barriers[1].dstAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT|VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; + image_barriers[1].dstAccessMask = + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; init.disp.cmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, - VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, + VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | + VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | + VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, 0, 0, nullptr, @@ -483,12 +486,9 @@ bool App::frame(const int frame_num) render_pass_info.framebuffer = this->framebuffers[image_index]; render_pass_info.renderArea.offset = { 0, 0 }; render_pass_info.renderArea.extent = init.swapchain.extent; - VkClearValue clearColors[] = { - { { { 0.0f, 0.0f, 0.0f, 1.0f } } }, - { { 1.0f } } - }; - render_pass_info.clearValueCount = 2; - render_pass_info.pClearValues = clearColors; + VkClearValue clearColors[] = { { { { 0.0f, 0.0f, 0.0f, 1.0f } } }, { { 1.0f } } }; + render_pass_info.clearValueCount = 2; + render_pass_info.pClearValues = clearColors; VkViewport viewport = {}; viewport.x = 0.0f; @@ -515,9 +515,11 @@ bool App::frame(const int frame_num) { VkMemoryBarrier memory_barrier = {}; - memory_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER; - memory_barrier.srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; - memory_barrier.dstAccessMask = VK_ACCESS_NONE; + memory_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER; + memory_barrier.srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | + VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; + memory_barrier.dstAccessMask = VK_ACCESS_NONE; VkImageMemoryBarrier image_barrier = {}; image_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; image_barrier.image = init.swapchain_images[image_index]; @@ -529,7 +531,9 @@ bool App::frame(const int frame_num) image_barrier.srcAccessMask = VK_ACCESS_NONE; 
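A note on the dispatch-table hunks earlier in this patch (the long run of `fp_vk*` members): the changes are formatting-only, but the pattern they touch is the core of the generated header. For every device-level entry point the table declares either a typed `PFN_vk*` pointer, when the owning extension is visible at compile time, or a `void*` placeholder with the same name, so the struct's size and member offsets do not depend on which extensions the installed Vulkan headers happen to define. A minimal hand-written sketch of that pattern, assuming only the standard Vulkan headers; the struct and function names below are illustrative, not the generated ones:

#include <vulkan/vulkan.h>

// Illustrative two-state dispatch-table entry following the same
// conditional-member pattern as the generated test_app_dispatch.h.
struct MiniDispatchTable
{
#if defined(VK_KHR_synchronization2)
    PFN_vkCmdPipelineBarrier2KHR fp_vkCmdPipelineBarrier2KHR = nullptr;
#else
    void* fp_vkCmdPipelineBarrier2KHR{};
#endif

    // Resolve the pointer at device-creation time; the #if keeps the lookup
    // compiled out when the extension is not present in the headers.
    void populate(VkDevice device, PFN_vkGetDeviceProcAddr get_device_proc_addr)
    {
#if defined(VK_KHR_synchronization2)
        fp_vkCmdPipelineBarrier2KHR = reinterpret_cast<PFN_vkCmdPipelineBarrier2KHR>(
            get_device_proc_addr(device, "vkCmdPipelineBarrier2KHR"));
#endif
    }
};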
image_barrier.dstAccessMask = VK_ACCESS_NONE; init.disp.cmdPipelineBarrier(command_buffer, - VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT, + VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | + VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | + VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 1, @@ -646,7 +650,8 @@ void App::setup() auto queue_family_index = init.device.get_queue_index(gfxrecon::test::QueueType::graphics); if (!queue_family_index) throw std::runtime_error("could not find graphics queue"); - for (auto& command_pool : command_pools) { + for (auto& command_pool : command_pools) + { command_pool = gfxrecon::test::create_command_pool(init.disp, *queue_family_index); } diff --git a/test/test_apps/triangle/app.cpp b/test/test_apps/triangle/app.cpp index b090e6ad69..e67ff9e1c1 100644 --- a/test/test_apps/triangle/app.cpp +++ b/test/test_apps/triangle/app.cpp @@ -52,7 +52,7 @@ class App : public gfxrecon::test::TestAppBase VkPipelineLayout pipeline_layout; VkPipeline graphics_pipeline; - VkCommandPool command_pools[MAX_FRAMES_IN_FLIGHT]; + VkCommandPool command_pools[MAX_FRAMES_IN_FLIGHT]; size_t current_frame = 0; @@ -282,9 +282,9 @@ bool App::frame(const int frame_num) init.disp.resetCommandPool(this->command_pools[current_frame], 0); VkCommandBufferAllocateInfo allocate_info = {}; - allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO; - allocate_info.commandBufferCount = 1; - allocate_info.commandPool = this->command_pools[current_frame]; + allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO; + allocate_info.commandBufferCount = 1; + allocate_info.commandPool = this->command_pools[current_frame]; VkCommandBuffer command_buffer; result = init.disp.allocateCommandBuffers(&allocate_info, &command_buffer); VERIFY_VK_RESULT("failed to allocate command buffer", result); @@ -292,12 +292,12 @@ bool App::frame(const int frame_num) { VkCommandBufferBeginInfo begin_info = {}; begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; - result = init.disp.beginCommandBuffer(command_buffer, &begin_info); + result = init.disp.beginCommandBuffer(command_buffer, &begin_info); VERIFY_VK_RESULT("failed to create command buffer", result); { VkImageMemoryBarrier image_barrier = {}; - image_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; + image_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; image_barrier.image = init.swapchain_images[image_index]; image_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED; image_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; @@ -353,7 +353,7 @@ bool App::frame(const int frame_num) { VkImageMemoryBarrier image_barrier = {}; - image_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; + image_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; image_barrier.image = init.swapchain_images[image_index]; image_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; image_barrier.newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR; @@ -469,7 +469,8 @@ void App::setup() auto queue_family_index = init.device.get_queue_index(gfxrecon::test::QueueType::graphics); if (!queue_family_index) throw std::runtime_error("could not find graphics queue"); - for (auto& command_pool : command_pools) { + for (auto& command_pool : command_pools) + { command_pool = gfxrecon::test::create_command_pool(init.disp, *queue_family_index); } From 1c98a17357b1ea340d5bc081686469aeb649156d Mon Sep 17 00:00:00 2001 From: 
beau-lunarg Date: Sun, 27 Oct 2024 16:02:01 -0400 Subject: [PATCH 25/70] Tricky format issue --- test/test_apps/common/test_app_base.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index 6bb8f2e87a..114b291607 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -174,7 +174,7 @@ class VulkanFunctions if (!library) library = dlopen("libMoltenVK.dylib", RTLD_NOW | RTLD_LOCAL); #elif defined(_WIN32) - library = LoadLibrary(TEXT("vulkan-1.dll")); + library = LoadLibrary(TEXT("vulkan-1.dll")); #else assert(false && "Unsupported platform"); #endif From 4328526265efc632d855c0dabd15684ab48f81bd Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Sat, 2 Nov 2024 11:04:29 -0400 Subject: [PATCH 26/70] Remove sdl submodule --- .gitmodules | 3 --- external/SDL | 1 - 2 files changed, 4 deletions(-) delete mode 160000 external/SDL diff --git a/.gitmodules b/.gitmodules index af8a56b759..b2ee74512d 100644 --- a/.gitmodules +++ b/.gitmodules @@ -7,6 +7,3 @@ [submodule "external/SPIRV-Reflect"] path = external/SPIRV-Reflect url = https://github.com/KhronosGroup/SPIRV-Reflect.git -[submodule "external/SDL"] - path = external/SDL - url = https://github.com/libsdl-org/SDL.git diff --git a/external/SDL b/external/SDL deleted file mode 160000 index 3ebfdb04be..0000000000 --- a/external/SDL +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 3ebfdb04be8d543321524113951f817e0e2c56c4 From 5dfb7f7a4d35f6531731791d2380a8552a8b8a8f Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Sat, 2 Nov 2024 14:02:57 -0400 Subject: [PATCH 27/70] Add SDL3 dependency through CMAKE Fix shader copies and dll dependencies for test apps --- CMakeLists.txt | 1 - cmake/FindSDL3.cmake | 7 +++++++ test/test_apps/CMakeLists.txt | 2 ++ test/test_apps/multisample-depth/CMakeLists.txt | 5 ++++- test/test_apps/triangle/CMakeLists.txt | 5 ++++- 5 files changed, 17 insertions(+), 3 deletions(-) create mode 100644 cmake/FindSDL3.cmake diff --git a/CMakeLists.txt b/CMakeLists.txt index 71e372bae2..74b737c41c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -354,7 +354,6 @@ if (${RUN_TESTS}) target_include_directories(catch2 INTERFACE external) endif() -add_subdirectory(external/SDL) add_subdirectory(external/nlohmann) add_subdirectory(framework) if(NOT BUILD_STATIC) diff --git a/cmake/FindSDL3.cmake b/cmake/FindSDL3.cmake new file mode 100644 index 0000000000..3202a2c94d --- /dev/null +++ b/cmake/FindSDL3.cmake @@ -0,0 +1,7 @@ +include(FetchContent) +FetchContent_Declare( + sdl + GIT_REPOSITORY https://github.com/libsdl-org/SDL.git + GIT_TAG preview-3.1.3 +) +FetchContent_MakeAvailable(sdl) diff --git a/test/test_apps/CMakeLists.txt b/test/test_apps/CMakeLists.txt index 2241ef3611..2325f01b0f 100644 --- a/test/test_apps/CMakeLists.txt +++ b/test/test_apps/CMakeLists.txt @@ -26,6 +26,8 @@ # Description: CMake script for test apps ############################################################################### +find_package(SDL3) + add_custom_target(gfxrecon-testapps) add_subdirectory(triangle) diff --git a/test/test_apps/multisample-depth/CMakeLists.txt b/test/test_apps/multisample-depth/CMakeLists.txt index 526a9a5e51..179550423d 100644 --- a/test/test_apps/multisample-depth/CMakeLists.txt +++ b/test/test_apps/multisample-depth/CMakeLists.txt @@ -61,8 +61,11 @@ add_custom_command( POST_BUILD COMMAND ${CMAKE_COMMAND} -E copy_directory - ${CMAKE_CURRENT_LIST_DIR}/shaders "$" + 
${CMAKE_CURRENT_LIST_DIR}/shaders ${CMAKE_CURRENT_BINARY_DIR} DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_BUILD_TYPE}) +add_custom_command(TARGET gfxrecon-testapp-multisample-depth POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy_if_different $ $ + COMMAND_EXPAND_LISTS) install(TARGETS gfxrecon-testapp-multisample-depth RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}) diff --git a/test/test_apps/triangle/CMakeLists.txt b/test/test_apps/triangle/CMakeLists.txt index e157543a2c..b63a645460 100644 --- a/test/test_apps/triangle/CMakeLists.txt +++ b/test/test_apps/triangle/CMakeLists.txt @@ -63,8 +63,11 @@ add_custom_command( POST_BUILD COMMAND ${CMAKE_COMMAND} -E copy_directory - ${CMAKE_CURRENT_LIST_DIR}/shaders "$" + ${CMAKE_CURRENT_LIST_DIR}/shaders ${CMAKE_CURRENT_BINARY_DIR} DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_BUILD_TYPE}) +add_custom_command(TARGET gfxrecon-testapp-triangle POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy_if_different $ $ + COMMAND_EXPAND_LISTS) install(TARGETS gfxrecon-testapp-triangle RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}) From 4e72f0bad44644d1f1112ad7dbe3ffc942f820af Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Sat, 2 Nov 2024 16:02:16 -0400 Subject: [PATCH 28/70] Win32 conditional to copy dlls --- .../multisample-depth/CMakeLists.txt | 22 +++++++++------- test/test_apps/triangle/CMakeLists.txt | 26 ++++++++++--------- 2 files changed, 26 insertions(+), 22 deletions(-) diff --git a/test/test_apps/multisample-depth/CMakeLists.txt b/test/test_apps/multisample-depth/CMakeLists.txt index 179550423d..799eef7310 100644 --- a/test/test_apps/multisample-depth/CMakeLists.txt +++ b/test/test_apps/multisample-depth/CMakeLists.txt @@ -29,9 +29,9 @@ add_executable(gfxrecon-testapp-multisample-depth "") target_sources(gfxrecon-testapp-multisample-depth - PRIVATE - ${CMAKE_CURRENT_LIST_DIR}/app.cpp - ${CMAKE_CURRENT_LIST_DIR}/../common/test_app_base.cpp) + PRIVATE + ${CMAKE_CURRENT_LIST_DIR}/app.cpp + ${CMAKE_CURRENT_LIST_DIR}/../common/test_app_base.cpp) target_include_directories(gfxrecon-testapp-multisample-depth PUBLIC ${CMAKE_BINARY_DIR} @@ -47,12 +47,12 @@ target_link_libraries(gfxrecon-testapp-multisample-depth if (MSVC) # Force inclusion of "gfxrecon_disable_popup_result" variable in linking. # On 32-bit windows, MSVC prefixes symbols with "_" but on 64-bit windows it doesn't. 
- if(CMAKE_SIZEOF_VOID_P EQUAL 4) + if (CMAKE_SIZEOF_VOID_P EQUAL 4) target_link_options(gfxrecon-replay PUBLIC "LINKER:/Include:_gfxrecon_disable_popup_result") - else() + else () target_link_options(gfxrecon-replay PUBLIC "LINKER:/Include:gfxrecon_disable_popup_result") - endif() -endif() + endif () +endif () common_build_directives(gfxrecon-testapp-multisample-depth) @@ -63,9 +63,11 @@ add_custom_command( ${CMAKE_COMMAND} -E copy_directory ${CMAKE_CURRENT_LIST_DIR}/shaders ${CMAKE_CURRENT_BINARY_DIR} DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_BUILD_TYPE}) -add_custom_command(TARGET gfxrecon-testapp-multisample-depth POST_BUILD - COMMAND ${CMAKE_COMMAND} -E copy_if_different $ $ - COMMAND_EXPAND_LISTS) +if (WIN32) + add_custom_command(TARGET gfxrecon-testapp-multisample-depth POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy_if_different $ $ + COMMAND_EXPAND_LISTS) +endif () install(TARGETS gfxrecon-testapp-multisample-depth RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}) diff --git a/test/test_apps/triangle/CMakeLists.txt b/test/test_apps/triangle/CMakeLists.txt index b63a645460..1b04f409ea 100644 --- a/test/test_apps/triangle/CMakeLists.txt +++ b/test/test_apps/triangle/CMakeLists.txt @@ -29,9 +29,9 @@ add_executable(gfxrecon-testapp-triangle "") target_sources(gfxrecon-testapp-triangle - PRIVATE - ${CMAKE_CURRENT_LIST_DIR}/app.cpp - ${CMAKE_CURRENT_LIST_DIR}/../common/test_app_base.cpp) + PRIVATE + ${CMAKE_CURRENT_LIST_DIR}/app.cpp + ${CMAKE_CURRENT_LIST_DIR}/../common/test_app_base.cpp) target_include_directories(gfxrecon-testapp-triangle PUBLIC ${CMAKE_BINARY_DIR} @@ -49,12 +49,12 @@ target_link_libraries(gfxrecon-testapp-triangle if (MSVC) # Force inclusion of "gfxrecon_disable_popup_result" variable in linking. # On 32-bit windows, MSVC prefixes symbols with "_" but on 64-bit windows it doesn't. 
- if(CMAKE_SIZEOF_VOID_P EQUAL 4) + if (CMAKE_SIZEOF_VOID_P EQUAL 4) target_link_options(gfxrecon-replay PUBLIC "LINKER:/Include:_gfxrecon_disable_popup_result") - else() + else () target_link_options(gfxrecon-replay PUBLIC "LINKER:/Include:gfxrecon_disable_popup_result") - endif() -endif() + endif () +endif () common_build_directives(gfxrecon-testapp-triangle) @@ -62,12 +62,14 @@ add_custom_command( TARGET gfxrecon-testapp-triangle POST_BUILD COMMAND - ${CMAKE_COMMAND} -E copy_directory - ${CMAKE_CURRENT_LIST_DIR}/shaders ${CMAKE_CURRENT_BINARY_DIR} + ${CMAKE_COMMAND} -E copy_directory + ${CMAKE_CURRENT_LIST_DIR}/shaders ${CMAKE_CURRENT_BINARY_DIR} DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_BUILD_TYPE}) -add_custom_command(TARGET gfxrecon-testapp-triangle POST_BUILD - COMMAND ${CMAKE_COMMAND} -E copy_if_different $ $ - COMMAND_EXPAND_LISTS) +if (WIN32) + add_custom_command(TARGET gfxrecon-testapp-triangle POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy_if_different $ $ + COMMAND_EXPAND_LISTS) +endif () install(TARGETS gfxrecon-testapp-triangle RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}) From 508d8f921fa77359e7c784ba14028988af1ebc3a Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Sat, 2 Nov 2024 16:06:33 -0400 Subject: [PATCH 29/70] Fix copyright names and years --- test/CMakeLists.txt | 4 +--- test/test_apps/CMakeLists.txt | 4 +--- test/test_apps/common/generate_dispatch.py | 1 - test/test_apps/common/test_app_base.cpp | 1 - test/test_apps/common/test_app_base.h | 1 - test/test_apps/multisample-depth/CMakeLists.txt | 4 +--- test/test_apps/multisample-depth/app.cpp | 1 - test/test_apps/triangle/CMakeLists.txt | 4 +--- test/test_apps/triangle/app.cpp | 1 - 9 files changed, 4 insertions(+), 17 deletions(-) diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt index b6c5201c71..c65b5fdc32 100644 --- a/test/CMakeLists.txt +++ b/test/CMakeLists.txt @@ -1,6 +1,5 @@ ############################################################################### -# Copyright (c) 2018-2020 LunarG, Inc. -# Copyright (c) 2020-2023 Advanced Micro Devices, Inc. +# Copyright (c) 2018-2024 LunarG, Inc. # All rights reserved # # Permission is hereby granted, free of charge, to any person obtaining a copy @@ -22,7 +21,6 @@ # IN THE SOFTWARE. # # Author: LunarG Team -# Author: AMD Developer Tools Team # Description: CMake script for gfxrecon tests ############################################################################### diff --git a/test/test_apps/CMakeLists.txt b/test/test_apps/CMakeLists.txt index 2325f01b0f..cac6ffd32f 100644 --- a/test/test_apps/CMakeLists.txt +++ b/test/test_apps/CMakeLists.txt @@ -1,6 +1,5 @@ ############################################################################### -# Copyright (c) 2018-2020 LunarG, Inc. -# Copyright (c) 2020-2023 Advanced Micro Devices, Inc. +# Copyright (c) 2018-2024 LunarG, Inc. # All rights reserved # # Permission is hereby granted, free of charge, to any person obtaining a copy @@ -22,7 +21,6 @@ # IN THE SOFTWARE. 
# # Author: LunarG Team -# Author: AMD Developer Tools Team # Description: CMake script for test apps ############################################################################### diff --git a/test/test_apps/common/generate_dispatch.py b/test/test_apps/common/generate_dispatch.py index 19b807d1af..9a4b8057f1 100644 --- a/test/test_apps/common/generate_dispatch.py +++ b/test/test_apps/common/generate_dispatch.py @@ -1,4 +1,3 @@ - # # generate_dispatch.py # diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index 114b291607..f0c5e71ccb 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -1,5 +1,4 @@ /* -** Copyright (c) 2018-2023 Valve Corporation ** Copyright (c) 2018-2024 LunarG, Inc. ** ** Permission is hereby granted, free of charge, to any person obtaining a diff --git a/test/test_apps/common/test_app_base.h b/test/test_apps/common/test_app_base.h index 4d134dbece..748ef2892a 100644 --- a/test/test_apps/common/test_app_base.h +++ b/test/test_apps/common/test_app_base.h @@ -1,5 +1,4 @@ /* -** Copyright (c) 2018-2023 Valve Corporation ** Copyright (c) 2018-2024 LunarG, Inc. ** ** Permission is hereby granted, free of charge, to any person obtaining a diff --git a/test/test_apps/multisample-depth/CMakeLists.txt b/test/test_apps/multisample-depth/CMakeLists.txt index 799eef7310..b02883e9de 100644 --- a/test/test_apps/multisample-depth/CMakeLists.txt +++ b/test/test_apps/multisample-depth/CMakeLists.txt @@ -1,6 +1,5 @@ ############################################################################### -# Copyright (c) 2018-2020 LunarG, Inc. -# Copyright (c) 2020-2023 Advanced Micro Devices, Inc. +# Copyright (c) 2018-2024 LunarG, Inc. # All rights reserved # # Permission is hereby granted, free of charge, to any person obtaining a copy @@ -22,7 +21,6 @@ # IN THE SOFTWARE. # # Author: LunarG Team -# Author: AMD Developer Tools Team # Description: CMake script for multisample depth test app ############################################################################### diff --git a/test/test_apps/multisample-depth/app.cpp b/test/test_apps/multisample-depth/app.cpp index 1701a7fc69..f31f00ab72 100644 --- a/test/test_apps/multisample-depth/app.cpp +++ b/test/test_apps/multisample-depth/app.cpp @@ -1,5 +1,4 @@ /* -** Copyright (c) 2018-2023 Valve Corporation ** Copyright (c) 2018-2024 LunarG, Inc. ** ** Permission is hereby granted, free of charge, to any person obtaining a diff --git a/test/test_apps/triangle/CMakeLists.txt b/test/test_apps/triangle/CMakeLists.txt index 1b04f409ea..9d4afead26 100644 --- a/test/test_apps/triangle/CMakeLists.txt +++ b/test/test_apps/triangle/CMakeLists.txt @@ -1,6 +1,5 @@ ############################################################################### -# Copyright (c) 2018-2020 LunarG, Inc. -# Copyright (c) 2020-2023 Advanced Micro Devices, Inc. +# Copyright (c) 2018-2024 LunarG, Inc. # All rights reserved # # Permission is hereby granted, free of charge, to any person obtaining a copy @@ -22,7 +21,6 @@ # IN THE SOFTWARE. 
# # Author: LunarG Team -# Author: AMD Developer Tools Team # Description: CMake script for triangle test app ############################################################################### diff --git a/test/test_apps/triangle/app.cpp b/test/test_apps/triangle/app.cpp index e67ff9e1c1..b230b558ef 100644 --- a/test/test_apps/triangle/app.cpp +++ b/test/test_apps/triangle/app.cpp @@ -1,5 +1,4 @@ /* -** Copyright (c) 2018-2023 Valve Corporation ** Copyright (c) 2018-2024 LunarG, Inc. ** ** Permission is hereby granted, free of charge, to any person obtaining a From 28e3e55f7f65239a2b0b76682e80dbe4c4cfc9fd Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Mon, 4 Nov 2024 15:55:01 -0500 Subject: [PATCH 30/70] Setup extensions from device builder --- test/test_apps/common/test_app_base.cpp | 12 ++++++++++++ test/test_apps/common/test_app_base.h | 18 ++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index f0c5e71ccb..67e11c4ac7 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -2962,6 +2962,18 @@ void TestAppBase::configure_physical_device_selector(PhysicalDeviceSelector& phy void TestAppBase::configure_device_builder(DeviceBuilder& device_builder, PhysicalDevice const& physical_device) {} void TestAppBase::configure_swapchain_builder(SwapchainBuilder& swapchain_builder) {} +bool DeviceBuilder::enable_extension_if_present(const char* extension) { + return physical_device.enable_extension_if_present(extension); +} + +bool DeviceBuilder::enable_extensions_if_present(const std::vector<const char*>& extensions) { + return physical_device.enable_extensions_if_present(extensions); +} + +bool DeviceBuilder::enable_features_if_present(const VkPhysicalDeviceFeatures& features_to_enable) { + return physical_device.enable_features_if_present(features_to_enable); +} + GFXRECON_END_NAMESPACE(test) GFXRECON_END_NAMESPACE(gfxrecon) diff --git a/test/test_apps/common/test_app_base.h b/test/test_apps/common/test_app_base.h index 748ef2892a..9048cc7abb 100644 --- a/test/test_apps/common/test_app_base.h +++ b/test/test_apps/common/test_app_base.h @@ -783,6 +783,24 @@ class DeviceBuilder return *this; } + bool enable_extension_if_present(const char* extension); + + // If all the given extensions are present, make all the extensions be enabled on the device. + // Returns true if all the extensions are present. + bool enable_extensions_if_present(const std::vector<const char*>& extensions); + + // If the features from VkPhysicalDeviceFeatures are all present, make all of the features be enabled on the device. + // Returns true if all the features are present. + bool enable_features_if_present(const VkPhysicalDeviceFeatures& features_to_enable); + + // If the features from the provided features struct are all present, make all of the features be enabled on the + // device. Returns true if all of the features are present. + template <typename T> + bool enable_extension_features_if_present(T const& features_check) + { + return physical_device.enable_extension_features_if_present(features_check); + } + // Provide custom allocation callbacks.
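The new `DeviceBuilder::enable_*_if_present` helpers forward to the selected `PhysicalDevice`, which lets a test app opt into optional extensions and features from its `configure_device_builder` override without failing device creation when they are absent. A hypothetical override showing the intended call pattern; the app class, the chosen extensions, and the feature structs below are illustrative, only the builder methods come from this patch:

#include <test_app_base.h> // assumed include path; the test apps add ../common to their include dirs

// Hypothetical test app derived from gfxrecon::test::TestAppBase; only the
// device-configuration override is shown, other overrides are omitted.
class ExtensionProbeApp : public gfxrecon::test::TestAppBase
{
    void configure_device_builder(gfxrecon::test::DeviceBuilder&        device_builder,
                                  gfxrecon::test::PhysicalDevice const& physical_device) override
    {
        (void)physical_device;

        // Single optional extension: enabled only if the device exposes it.
        device_builder.enable_extension_if_present(VK_EXT_MEMORY_BUDGET_EXTENSION_NAME);

        // Group of extensions, enabled all-or-nothing.
        device_builder.enable_extensions_if_present({ VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME });

        // Core features, enabled only when supported.
        VkPhysicalDeviceFeatures features{};
        features.samplerAnisotropy = VK_TRUE;
        device_builder.enable_features_if_present(features);

        // Extension feature struct, checked through the templated helper.
        VkPhysicalDeviceSynchronization2FeaturesKHR sync2{};
        sync2.sType            = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR;
        sync2.synchronization2 = VK_TRUE;
        device_builder.enable_extension_features_if_present(sync2);
    }
};

Since each helper returns whether everything it asked for was actually present, a test can also branch on the result and skip the optional path instead of aborting.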
DeviceBuilder& set_allocation_callbacks(VkAllocationCallbacks* callbacks); From 28e3e55f7f65239a2b0b76682e80dbe4c4cfc9fd Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Mon, 4 Nov 2024 16:02:28 -0500 Subject: [PATCH 31/70] Fix formatting --- test/test_apps/common/test_app_base.cpp | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index 67e11c4ac7..39f70a05f9 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -173,7 +173,7 @@ class VulkanFunctions if (!library) library = dlopen("libMoltenVK.dylib", RTLD_NOW | RTLD_LOCAL); #elif defined(_WIN32) - library = LoadLibrary(TEXT("vulkan-1.dll")); + library = LoadLibrary(TEXT("vulkan-1.dll")); #else assert(false && "Unsupported platform"); #endif @@ -2962,15 +2962,18 @@ void TestAppBase::configure_physical_device_selector(PhysicalDeviceSelector& phy void TestAppBase::configure_device_builder(DeviceBuilder& device_builder, PhysicalDevice const& physical_device) {} void TestAppBase::configure_swapchain_builder(SwapchainBuilder& swapchain_builder) {} -bool DeviceBuilder::enable_extension_if_present(const char* extension) { +bool DeviceBuilder::enable_extension_if_present(const char* extension) +{ return physical_device.enable_extension_if_present(extension); } -bool DeviceBuilder::enable_extensions_if_present(const std::vector& extensions) { +bool DeviceBuilder::enable_extensions_if_present(const std::vector& extensions) +{ return physical_device.enable_extensions_if_present(extensions); } -bool DeviceBuilder::enable_features_if_present(const VkPhysicalDeviceFeatures& features_to_enable) { +bool DeviceBuilder::enable_features_if_present(const VkPhysicalDeviceFeatures& features_to_enable) +{ return physical_device.enable_features_if_present(features_to_enable); } From adf499abc85870883ce3a937dbaa517a2ddd11e7 Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Mon, 4 Nov 2024 16:58:44 -0500 Subject: [PATCH 32/70] Strange formatting issue --- test/test_apps/common/test_app_base.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index 39f70a05f9..b447a4ccb0 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -173,7 +173,7 @@ class VulkanFunctions if (!library) library = dlopen("libMoltenVK.dylib", RTLD_NOW | RTLD_LOCAL); #elif defined(_WIN32) - library = LoadLibrary(TEXT("vulkan-1.dll")); + library = LoadLibrary(TEXT("vulkan-1.dll")); #else assert(false && "Unsupported platform"); #endif From 02882b6053cc461f7f79d2bf17d61c911f81df28 Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Mon, 4 Nov 2024 19:59:03 -0500 Subject: [PATCH 33/70] Removed overuse of this->. 
Renamed Init to InitInfo --- test/test_apps/common/test_app_base.cpp | 74 ++++++++++++------------- test/test_apps/common/test_app_base.h | 10 ++-- 2 files changed, 42 insertions(+), 42 deletions(-) diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index b447a4ccb0..04e2382bfa 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -645,24 +645,24 @@ SystemInfo SystemInfo::get_system_info(PFN_vkGetInstanceProcAddr fp_vkGetInstanc SystemInfo::SystemInfo() { auto available_layers_ret = detail::get_vector( - this->available_layers, detail::vulkan_functions().fp_vkEnumerateInstanceLayerProperties); + available_layers, detail::vulkan_functions().fp_vkEnumerateInstanceLayerProperties); if (available_layers_ret != VK_SUCCESS) { - this->available_layers.clear(); + available_layers.clear(); } - for (auto& layer : this->available_layers) + for (auto& layer : available_layers) if (strcmp(layer.layerName, detail::validation_layer_name) == 0) validation_layers_available = true; auto available_extensions_ret = detail::get_vector( - this->available_extensions, detail::vulkan_functions().fp_vkEnumerateInstanceExtensionProperties, nullptr); + available_extensions, detail::vulkan_functions().fp_vkEnumerateInstanceExtensionProperties, nullptr); if (available_extensions_ret != VK_SUCCESS) { - this->available_extensions.clear(); + available_extensions.clear(); } - for (auto& ext : this->available_extensions) + for (auto& ext : available_extensions) { if (strcmp(ext.extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0) { @@ -670,15 +670,15 @@ SystemInfo::SystemInfo() } } - for (auto& layer : this->available_layers) + for (auto& layer : available_layers) { std::vector layer_extensions; auto layer_extensions_ret = detail::get_vector( layer_extensions, detail::vulkan_functions().fp_vkEnumerateInstanceExtensionProperties, layer.layerName); if (layer_extensions_ret == VK_SUCCESS) { - this->available_extensions.insert( - this->available_extensions.end(), layer_extensions.begin(), layer_extensions.end()); + available_extensions.insert( + available_extensions.end(), layer_extensions.begin(), layer_extensions.end()); for (auto& ext : layer_extensions) { if (strcmp(ext.extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0) @@ -727,7 +727,7 @@ void destroy_instance(Instance const& instance) Instance::operator VkInstance() const { - return this->instance; + return instance; } vkb::InstanceDispatchTable Instance::make_table() const @@ -1933,7 +1933,7 @@ bool PhysicalDevice::enable_features_node_if_present(detail::GenericFeaturesPNex PhysicalDevice::operator VkPhysicalDevice() const { - return this->physical_device; + return physical_device; } // ---- Queues ---- // @@ -2000,7 +2000,7 @@ vkb::DispatchTable Device::make_table() const Device::operator VkDevice() const { - return this->device; + return device; } CustomQueueDescription::CustomQueueDescription(uint32_t index, std::vector priorities) : @@ -2566,7 +2566,7 @@ void Swapchain::destroy_image_views(std::vector const& image_views) } Swapchain::operator VkSwapchainKHR() const { - return this->swapchain; + return swapchain; } SwapchainBuilder& SwapchainBuilder::set_old_swapchain(VkSwapchainKHR old_swapchain) { @@ -2820,12 +2820,12 @@ std::exception sdl_exception() return std::runtime_error(SDL_GetError()); } -void device_initialization_phase_1(const std::string& window_name, Init& init) +void device_initialization_phase_1(const std::string& window_name, InitInfo& init) { 
init.window = create_window_sdl(window_name.data(), true, 1024, 1024); } -void device_initialization_phase_2(InstanceBuilder const& instance_builder, Init& init) +void device_initialization_phase_2(InstanceBuilder const& instance_builder, InitInfo& init) { init.instance = instance_builder.build(); @@ -2834,19 +2834,19 @@ void device_initialization_phase_2(InstanceBuilder const& instance_builder, Init init.surface = create_surface_sdl(init.instance, init.window); } -PhysicalDevice device_initialization_phase_3(PhysicalDeviceSelector& phys_device_selector, Init& init) +PhysicalDevice device_initialization_phase_3(PhysicalDeviceSelector& phys_device_selector, InitInfo& init) { return phys_device_selector.set_surface(init.surface).select(); } -void device_initialization_phase_4(DeviceBuilder const& device_builder, Init& init) +void device_initialization_phase_4(DeviceBuilder const& device_builder, InitInfo& init) { init.device = device_builder.build(); init.disp = init.device.make_table(); } -void device_initialization_phase_5(SwapchainBuilder& swapchain_builder, Init& init) +void device_initialization_phase_5(SwapchainBuilder& swapchain_builder, InitInfo& init) { create_swapchain(swapchain_builder, init.swapchain); @@ -2854,9 +2854,9 @@ void device_initialization_phase_5(SwapchainBuilder& swapchain_builder, Init& in init.swapchain_image_views = init.swapchain.get_image_views(); } -Init device_initialization(const std::string& window_name) +InitInfo device_initialization(const std::string& window_name) { - Init init; + InitInfo init; device_initialization_phase_1(window_name, init); @@ -2875,7 +2875,7 @@ Init device_initialization(const std::string& window_name) return init; } -void cleanup_init(Init& init) +void cleanup_init(InitInfo& init) { init.swapchain.destroy_image_views(init.swapchain_image_views); @@ -2886,7 +2886,7 @@ void cleanup_init(Init& init) destroy_window_sdl(init.window); } -void recreate_init_swapchain(SwapchainBuilder& swapchain_builder, Init& init, bool wait_for_idle) +void recreate_init_swapchain(SwapchainBuilder& swapchain_builder, InitInfo& init, bool wait_for_idle) { if (wait_for_idle) init.disp.deviceWaitIdle(); @@ -2901,25 +2901,25 @@ void recreate_init_swapchain(SwapchainBuilder& swapchain_builder, Init& init, bo void TestAppBase::run(const std::string& window_name) { - device_initialization_phase_1(window_name, this->init); + device_initialization_phase_1(window_name, init); InstanceBuilder instance_builder; - this->configure_instance_builder(instance_builder); - device_initialization_phase_2(instance_builder, this->init); + configure_instance_builder(instance_builder); + device_initialization_phase_2(instance_builder, init); - PhysicalDeviceSelector phys_device_selector(this->init.instance); - this->configure_physical_device_selector(phys_device_selector); - init.physical_device = device_initialization_phase_3(phys_device_selector, this->init); + PhysicalDeviceSelector phys_device_selector(init.instance); + configure_physical_device_selector(phys_device_selector); + init.physical_device = device_initialization_phase_3(phys_device_selector, init); DeviceBuilder device_builder{ init.physical_device }; - this->configure_device_builder(device_builder, init.physical_device); - device_initialization_phase_4(device_builder, this->init); + configure_device_builder(device_builder, init.physical_device); + device_initialization_phase_4(device_builder, init); SwapchainBuilder swapchain_builder{ init.device }; - this->configure_swapchain_builder(swapchain_builder); - 
device_initialization_phase_5(swapchain_builder, this->init); + configure_swapchain_builder(swapchain_builder); + device_initialization_phase_5(swapchain_builder, init); - this->setup(); + setup(); bool running = true; int frame_num = 0; @@ -2938,17 +2938,17 @@ void TestAppBase::run(const std::string& window_name) ++frame_num; } - this->init.disp.deviceWaitIdle(); + init.disp.deviceWaitIdle(); - this->cleanup(); + cleanup(); - cleanup_init(this->init); + cleanup_init(init); } void TestAppBase::recreate_swapchain(bool wait_for_idle) { SwapchainBuilder swapchain_builder{ init.device }; - this->configure_swapchain_builder(swapchain_builder); + configure_swapchain_builder(swapchain_builder); recreate_init_swapchain(swapchain_builder, init, wait_for_idle); } diff --git a/test/test_apps/common/test_app_base.h b/test/test_apps/common/test_app_base.h index 9048cc7abb..7ce4ff03d5 100644 --- a/test/test_apps/common/test_app_base.h +++ b/test/test_apps/common/test_app_base.h @@ -1041,7 +1041,7 @@ VkShaderModule readShaderFromFile(vkb::DispatchTable const& disp, const std::str throw gfxrecon::test::vulkan_exception(message, result); \ } -struct Init +struct InitInfo { SDL_Window* window; Instance instance; @@ -1055,11 +1055,11 @@ struct Init std::vector swapchain_image_views; }; -Init device_initialization(const std::string& window_name); +InitInfo device_initialization(const std::string& window_name); -void cleanup_init(Init& init); +void cleanup_init(InitInfo& init); -void recreate_init_swapchain(Init& init, bool wait_for_idle = true); +void recreate_init_swapchain(InitInfo& init, bool wait_for_idle = true); class TestAppBase { @@ -1084,7 +1084,7 @@ class TestAppBase virtual void configure_device_builder(DeviceBuilder& device_builder, PhysicalDevice const& physical_device); virtual void configure_swapchain_builder(SwapchainBuilder& swapchain_builder); - Init init; + InitInfo init; }; GFXRECON_END_NAMESPACE(test) From e7ed80ef28a2dfd3711aaada6cfcda0c3dd40fbe Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Mon, 4 Nov 2024 20:49:37 -0500 Subject: [PATCH 34/70] Fix format issues --- test/test_apps/common/test_app_base.cpp | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index 04e2382bfa..03bfada853 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -173,7 +173,7 @@ class VulkanFunctions if (!library) library = dlopen("libMoltenVK.dylib", RTLD_NOW | RTLD_LOCAL); #elif defined(_WIN32) - library = LoadLibrary(TEXT("vulkan-1.dll")); + library = LoadLibrary(TEXT("vulkan-1.dll")); #else assert(false && "Unsupported platform"); #endif @@ -677,8 +677,7 @@ SystemInfo::SystemInfo() layer_extensions, detail::vulkan_functions().fp_vkEnumerateInstanceExtensionProperties, layer.layerName); if (layer_extensions_ret == VK_SUCCESS) { - available_extensions.insert( - available_extensions.end(), layer_extensions.begin(), layer_extensions.end()); + available_extensions.insert(available_extensions.end(), layer_extensions.begin(), layer_extensions.end()); for (auto& ext : layer_extensions) { if (strcmp(ext.extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0) From f7a628bb39e7474ff3a91cdb43e5276baddfc22c Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Tue, 5 Nov 2024 11:24:48 -0500 Subject: [PATCH 35/70] Same formatting issue as before --- test/test_apps/common/test_app_base.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/test/test_apps/common/test_app_base.cpp b/test/test_apps/common/test_app_base.cpp index 03bfada853..2c6c94006b 100644 --- a/test/test_apps/common/test_app_base.cpp +++ b/test/test_apps/common/test_app_base.cpp @@ -173,7 +173,7 @@ class VulkanFunctions if (!library) library = dlopen("libMoltenVK.dylib", RTLD_NOW | RTLD_LOCAL); #elif defined(_WIN32) - library = LoadLibrary(TEXT("vulkan-1.dll")); + library = LoadLibrary(TEXT("vulkan-1.dll")); #else assert(false && "Unsupported platform"); #endif From 6128e34a0cf211457f98b5a1478d611d66d9deab Mon Sep 17 00:00:00 2001 From: ziga-lunarg Date: Sat, 2 Nov 2024 21:45:36 +0100 Subject: [PATCH 36/70] Test pipeline binaries and host image copy --- test/test_apps/CMakeLists.txt | 2 + test/test_apps/host-image-copy/CMakeLists.txt | 73 +++ test/test_apps/host-image-copy/app.cpp | 491 ++++++++++++++++++ .../pipeline-binaries/CMakeLists.txt | 76 +++ test/test_apps/pipeline-binaries/app.cpp | 319 ++++++++++++ .../pipeline-binaries/shaders/frag.spv | Bin 0 -> 608 bytes .../pipeline-binaries/shaders/vert.spv | Bin 0 -> 1540 bytes 7 files changed, 961 insertions(+) create mode 100644 test/test_apps/host-image-copy/CMakeLists.txt create mode 100644 test/test_apps/host-image-copy/app.cpp create mode 100644 test/test_apps/pipeline-binaries/CMakeLists.txt create mode 100644 test/test_apps/pipeline-binaries/app.cpp create mode 100644 test/test_apps/pipeline-binaries/shaders/frag.spv create mode 100644 test/test_apps/pipeline-binaries/shaders/vert.spv diff --git a/test/test_apps/CMakeLists.txt b/test/test_apps/CMakeLists.txt index cac6ffd32f..ce838135dd 100644 --- a/test/test_apps/CMakeLists.txt +++ b/test/test_apps/CMakeLists.txt @@ -30,3 +30,5 @@ add_custom_target(gfxrecon-testapps) add_subdirectory(triangle) add_subdirectory(multisample-depth) +add_subdirectory(pipeline-binaries) +add_subdirectory(host-image-copy) diff --git a/test/test_apps/host-image-copy/CMakeLists.txt b/test/test_apps/host-image-copy/CMakeLists.txt new file mode 100644 index 0000000000..7c97d3741a --- /dev/null +++ b/test/test_apps/host-image-copy/CMakeLists.txt @@ -0,0 +1,73 @@ +############################################################################### +# Copyright (c) 2018-2020 LunarG, Inc. +# Copyright (c) 2020-2023 Advanced Micro Devices, Inc. +# All rights reserved +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+# +# Author: LunarG Team +# Author: AMD Developer Tools Team +# Description: CMake script for host-image-copy test app +############################################################################### + +add_executable(gfxrecon-testapp-host-image-copy "") + +target_sources(gfxrecon-testapp-host-image-copy + PRIVATE + ${CMAKE_CURRENT_LIST_DIR}/app.cpp + ${CMAKE_CURRENT_LIST_DIR}/../common/test_app_base.cpp) + +target_include_directories(gfxrecon-testapp-host-image-copy PUBLIC + ${CMAKE_BINARY_DIR} + ${CMAKE_CURRENT_LIST_DIR}/../common) + +target_link_libraries(gfxrecon-testapp-host-image-copy + gfxrecon_application + gfxrecon_decode + gfxrecon_graphics + gfxrecon_format + gfxrecon_util + SDL3::SDL3 + platform_specific) + +if (MSVC) + # Force inclusion of "gfxrecon_disable_popup_result" variable in linking. + # On 32-bit windows, MSVC prefixes symbols with "_" but on 64-bit windows it doesn't. + if(CMAKE_SIZEOF_VOID_P EQUAL 4) + target_link_options(gfxrecon-replay PUBLIC "LINKER:/Include:_gfxrecon_disable_popup_result") + else() + target_link_options(gfxrecon-replay PUBLIC "LINKER:/Include:gfxrecon_disable_popup_result") + endif() +endif() + +common_build_directives(gfxrecon-testapp-host-image-copy) + +add_custom_command( + TARGET gfxrecon-testapp-host-image-copy + POST_BUILD + DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_BUILD_TYPE}) +if (WIN32) +add_custom_command(TARGET gfxrecon-testapp-host-image-copy POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy_if_different $ $ + COMMAND_EXPAND_LISTS) +endif () + +install(TARGETS gfxrecon-testapp-host-image-copy RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}) + +add_dependencies(gfxrecon-testapps gfxrecon-testapp-host-image-copy) \ No newline at end of file diff --git a/test/test_apps/host-image-copy/app.cpp b/test/test_apps/host-image-copy/app.cpp new file mode 100644 index 0000000000..46c44d4769 --- /dev/null +++ b/test/test_apps/host-image-copy/app.cpp @@ -0,0 +1,491 @@ +/* +** Copyright (c) 2018-2023 Valve Corporation +** Copyright (c) 2018-2024 LunarG, Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and associated documentation files (the "Software"), +** to deal in the Software without restriction, including without limitation +** the rights to use, copy, modify, merge, publish, distribute, sublicense, +** and/or sell copies of the Software, and to permit persons to whom the +** Software is furnished to do so, subject to the following conditions: +** +** The above copyright notice and this permission notice shall be included in +** all copies or substantial portions of the Software. +** +** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +** IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +** AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +** FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +** DEALINGS IN THE SOFTWARE. 
+*/ + +#include + +#include + +#include + +#include + +GFXRECON_BEGIN_NAMESPACE(gfxrecon) + +GFXRECON_BEGIN_NAMESPACE(test_app) + +GFXRECON_BEGIN_NAMESPACE(host_image_copy) + +class App : public gfxrecon::test::TestAppBase +{ + public: + App() = default; + + private: + VkPhysicalDeviceHostImageCopyFeaturesEXT host_image_copy_features; + + VkQueue queue; + uint32_t queue_index; + + const uint32_t image_width = 256u; + const uint32_t image_height = 256u; + const uint32_t buffer_size = image_width * image_height * 4u; + + VkBuffer src_buffer; + VkDeviceMemory src_buffer_memory; + VkBuffer dst_buffer; + VkDeviceMemory dst_buffer_memory; + VkImage src_image; + VkDeviceMemory src_image_memory; + VkImage dst_image; + VkDeviceMemory dst_image_memory; + + VkCommandPool command_pool; + VkCommandBuffer command_buffer; + VkFence fence; + + void configure_physical_device_selector(test::PhysicalDeviceSelector& phys_device_selector); + + void configure_device_builder(test::DeviceBuilder& device_builder, + test::PhysicalDevice const& physical_device) override; + + uint32_t find_memory_type(uint32_t memoryTypeBits, VkMemoryPropertyFlags memory_property_flags); + void create_buffers_and_images(); + void allocate_command_buffer(); + void cleanup() override; + bool frame(const int frame_num) override; + void setup() override; +}; + +void App::configure_physical_device_selector(test::PhysicalDeviceSelector& phys_device_selector) +{ + phys_device_selector.add_required_extension("VK_KHR_copy_commands2"); + phys_device_selector.add_required_extension("VK_KHR_format_feature_flags2"); + phys_device_selector.add_required_extension("VK_EXT_host_image_copy"); +} + +void App::configure_device_builder(test::DeviceBuilder& device_builder, test::PhysicalDevice const& physical_device) +{ + host_image_copy_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT; + host_image_copy_features.pNext = nullptr; + host_image_copy_features.hostImageCopy = VK_TRUE; + device_builder.add_pNext(&host_image_copy_features); +} + +uint32_t App::find_memory_type(uint32_t memoryTypeBits, VkMemoryPropertyFlags memory_property_flags) +{ + VkPhysicalDeviceMemoryProperties memory_properties; + init.inst_disp.getPhysicalDeviceMemoryProperties(init.physical_device, &memory_properties); + + for (uint32_t i = 0; i < memory_properties.memoryTypeCount; ++i) + { + if ((memoryTypeBits & (1 << i)) && (memory_properties.memoryTypes[i].propertyFlags & memory_property_flags) > 0) + { + return i; + break; + } + } + + throw std::runtime_error("Could not find required memory type"); +} + +void App::create_buffers_and_images() +{ + VkBufferCreateInfo buffer_create_info; + buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO; + buffer_create_info.pNext = nullptr; + buffer_create_info.flags = 0u; + buffer_create_info.size = buffer_size; + buffer_create_info.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT; + buffer_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE; + buffer_create_info.queueFamilyIndexCount = 0u; + buffer_create_info.pQueueFamilyIndices = nullptr; + init.disp.createBuffer(&buffer_create_info, nullptr, &src_buffer); + + VkMemoryRequirements src_buffer_memory_requirements; + init.disp.getBufferMemoryRequirements(src_buffer, &src_buffer_memory_requirements); + + VkMemoryAllocateInfo src_buffer_memory_allocate_info; + src_buffer_memory_allocate_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO; + src_buffer_memory_allocate_info.pNext = nullptr; + src_buffer_memory_allocate_info.allocationSize = 
src_buffer_memory_requirements.size; + src_buffer_memory_allocate_info.memoryTypeIndex = + find_memory_type(src_buffer_memory_requirements.memoryTypeBits, + VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT); + init.disp.allocateMemory(&src_buffer_memory_allocate_info, nullptr, &src_buffer_memory); + init.disp.bindBufferMemory(src_buffer, src_buffer_memory, 0u); + + buffer_create_info.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT; + init.disp.createBuffer(&buffer_create_info, nullptr, &dst_buffer); + + VkMemoryRequirements dst_buffer_memory_requirements; + init.disp.getBufferMemoryRequirements(dst_buffer, &dst_buffer_memory_requirements); + + VkMemoryAllocateInfo dst_buffer_memory_allocate_info; + dst_buffer_memory_allocate_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO; + dst_buffer_memory_allocate_info.pNext = nullptr; + dst_buffer_memory_allocate_info.allocationSize = dst_buffer_memory_requirements.size; + dst_buffer_memory_allocate_info.memoryTypeIndex = + find_memory_type(dst_buffer_memory_requirements.memoryTypeBits, + VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT); + init.disp.allocateMemory(&dst_buffer_memory_allocate_info, nullptr, &dst_buffer_memory); + init.disp.bindBufferMemory(dst_buffer, dst_buffer_memory, 0u); + + VkImageCreateInfo image_create_info; + image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; + image_create_info.pNext = nullptr; + image_create_info.flags = 0u; + image_create_info.imageType = VK_IMAGE_TYPE_2D; + image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM; + image_create_info.extent = { image_width, image_height, 1u }; + image_create_info.mipLevels = 1u; + image_create_info.arrayLayers = 1u; + image_create_info.samples = VK_SAMPLE_COUNT_1_BIT; + image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL; + image_create_info.usage = + VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT; + image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE; + image_create_info.queueFamilyIndexCount = 0u; + image_create_info.pQueueFamilyIndices = nullptr; + image_create_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; + init.disp.createImage(&image_create_info, nullptr, &src_image); + + VkMemoryRequirements src_image_memory_requirements; + init.disp.getImageMemoryRequirements(src_image, &src_image_memory_requirements); + + VkMemoryAllocateInfo src_image_memory_allocate_info; + src_image_memory_allocate_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO; + src_image_memory_allocate_info.pNext = nullptr; + src_image_memory_allocate_info.allocationSize = src_image_memory_requirements.size; + src_image_memory_allocate_info.memoryTypeIndex = + find_memory_type(src_image_memory_requirements.memoryTypeBits, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT); + init.disp.allocateMemory(&src_image_memory_allocate_info, nullptr, &src_image_memory); + init.disp.bindImageMemory(src_image, src_image_memory, 0u); + + init.disp.createImage(&image_create_info, nullptr, &dst_image); + + VkMemoryRequirements dst_image_memory_requirements; + init.disp.getImageMemoryRequirements(src_image, &dst_image_memory_requirements); + + VkMemoryAllocateInfo dst_image_memory_allocate_info; + dst_image_memory_allocate_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO; + dst_image_memory_allocate_info.pNext = nullptr; + dst_image_memory_allocate_info.allocationSize = dst_image_memory_requirements.size; + dst_image_memory_allocate_info.memoryTypeIndex = + 
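[Note: create_buffers_and_images repeats the same create, query-requirements, allocate, bind sequence for two buffers and two images. Two details worth calling out: find_memory_type above accepts a type that has any of the requested property bits, whereas requiring all of them avoids picking memory that is HOST_VISIBLE but not HOST_COHERENT; and the dst_image requirements above are queried from src_image, which only works because both images are created identically. A hypothetical helper, not part of this patch, showing the buffer variant of the pattern with those points addressed:]

    #include <vulkan/vulkan.h>
    #include <cstdint>
    #include <stdexcept>

    // Hypothetical helper: create an exclusive-use buffer, allocate memory with
    // all of the requested property flags, and bind it, checking each result.
    void create_buffer_with_memory(VkPhysicalDevice      physical_device,
                                   VkDevice              device,
                                   VkDeviceSize          size,
                                   VkBufferUsageFlags    usage,
                                   VkMemoryPropertyFlags properties,
                                   VkBuffer&             buffer,
                                   VkDeviceMemory&       memory)
    {
        VkBufferCreateInfo buffer_info{};
        buffer_info.sType       = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
        buffer_info.size        = size;
        buffer_info.usage       = usage;
        buffer_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
        if (vkCreateBuffer(device, &buffer_info, nullptr, &buffer) != VK_SUCCESS)
            throw std::runtime_error("vkCreateBuffer failed");

        VkMemoryRequirements requirements{};
        vkGetBufferMemoryRequirements(device, buffer, &requirements);

        VkPhysicalDeviceMemoryProperties memory_properties{};
        vkGetPhysicalDeviceMemoryProperties(physical_device, &memory_properties);

        uint32_t type_index = UINT32_MAX;
        for (uint32_t i = 0; i < memory_properties.memoryTypeCount; ++i)
        {
            // Require that *all* requested property bits are present.
            if ((requirements.memoryTypeBits & (1u << i)) &&
                (memory_properties.memoryTypes[i].propertyFlags & properties) == properties)
            {
                type_index = i;
                break;
            }
        }
        if (type_index == UINT32_MAX)
            throw std::runtime_error("no suitable memory type");

        VkMemoryAllocateInfo allocate_info{};
        allocate_info.sType           = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
        allocate_info.allocationSize  = requirements.size;
        allocate_info.memoryTypeIndex = type_index;
        if (vkAllocateMemory(device, &allocate_info, nullptr, &memory) != VK_SUCCESS)
            throw std::runtime_error("vkAllocateMemory failed");

        vkBindBufferMemory(device, buffer, memory, 0);
    }
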
find_memory_type(dst_image_memory_requirements.memoryTypeBits, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT); + init.disp.allocateMemory(&dst_image_memory_allocate_info, nullptr, &dst_image_memory); + init.disp.bindImageMemory(dst_image, dst_image_memory, 0u); +} + +void App::allocate_command_buffer() +{ + VkCommandPoolCreateInfo command_pool_create_info; + command_pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO; + command_pool_create_info.pNext = nullptr; + command_pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT; + command_pool_create_info.queueFamilyIndex = queue_index; + init.disp.createCommandPool(&command_pool_create_info, nullptr, &command_pool); + + VkCommandBufferAllocateInfo command_buffer_allocate_info; + command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO; + command_buffer_allocate_info.pNext = nullptr; + command_buffer_allocate_info.commandPool = command_pool; + command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY; + command_buffer_allocate_info.commandBufferCount = 1u; + init.disp.allocateCommandBuffers(&command_buffer_allocate_info, &command_buffer); +} + +bool App::frame(const int frame_num) +{ + VkResult result = VK_SUCCESS; + + bool memory_to_image = frame_num == 0; + + VkHostImageLayoutTransitionInfoEXT host_image_layout_transition_infos[2]; + host_image_layout_transition_infos[0].sType = VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT; + host_image_layout_transition_infos[0].pNext = nullptr; + host_image_layout_transition_infos[0].image = src_image; + host_image_layout_transition_infos[0].oldLayout = VK_IMAGE_LAYOUT_UNDEFINED; + host_image_layout_transition_infos[0].newLayout = VK_IMAGE_LAYOUT_GENERAL; + host_image_layout_transition_infos[0].subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + host_image_layout_transition_infos[0].subresourceRange.baseMipLevel = 0u; + host_image_layout_transition_infos[0].subresourceRange.levelCount = 1u; + host_image_layout_transition_infos[0].subresourceRange.baseArrayLayer = 0u; + host_image_layout_transition_infos[0].subresourceRange.layerCount = 1u; + host_image_layout_transition_infos[1].sType = VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT; + host_image_layout_transition_infos[1].pNext = nullptr; + host_image_layout_transition_infos[1].image = dst_image; + host_image_layout_transition_infos[1].oldLayout = VK_IMAGE_LAYOUT_UNDEFINED; + host_image_layout_transition_infos[1].newLayout = VK_IMAGE_LAYOUT_GENERAL; + host_image_layout_transition_infos[1].subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + host_image_layout_transition_infos[1].subresourceRange.baseMipLevel = 0u; + host_image_layout_transition_infos[1].subresourceRange.levelCount = 1u; + host_image_layout_transition_infos[1].subresourceRange.baseArrayLayer = 0u; + host_image_layout_transition_infos[1].subresourceRange.layerCount = 1u; + result = init.disp.transitionImageLayoutEXT(2u, host_image_layout_transition_infos); + VERIFY_VK_RESULT("failed to transition image layouts", result); + + uint32_t* src_data; + result = init.disp.mapMemory(src_buffer_memory, 0u, buffer_size, 0u, (void**)&src_data); + VERIFY_VK_RESULT("failed to map src buffer memory", result); + for (uint32_t i = 0; i < buffer_size / sizeof(uint32_t); ++i) + { + src_data[i] = i + 1; + } + + uint32_t* dst_data; + result = init.disp.mapMemory(dst_buffer_memory, 0u, buffer_size, 0u, (void**)&dst_data); + VERIFY_VK_RESULT("failed to map memory", result); + + if (memory_to_image) + { + 
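[Note: both staging buffers are mapped above and stay mapped for the rest of the frame. Because their memory was requested with VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, no explicit flush or invalidate is needed between the CPU writes and the copies; with merely HOST_VISIBLE memory the written range would have to be flushed first. A sketch of that extra step, assuming a non-coherent allocation:]

    #include <vulkan/vulkan.h>

    // Only required for HOST_VISIBLE memory that is not HOST_COHERENT; the
    // buffers in this test are coherent, so the app can skip this.
    void flush_written_range(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size)
    {
        VkMappedMemoryRange range{};
        range.sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
        range.memory = memory;
        range.offset = offset;
        range.size   = size; // or VK_WHOLE_SIZE; otherwise must respect nonCoherentAtomSize
        vkFlushMappedMemoryRanges(device, 1, &range);
    }
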
VkMemoryToImageCopyEXT memory_to_image_copy; + memory_to_image_copy.sType = VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT; + memory_to_image_copy.pNext = nullptr; + memory_to_image_copy.pHostPointer = src_data; + memory_to_image_copy.memoryRowLength = 0u; + memory_to_image_copy.memoryImageHeight = 0u; + memory_to_image_copy.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + memory_to_image_copy.imageSubresource.mipLevel = 0u; + memory_to_image_copy.imageSubresource.baseArrayLayer = 0u; + memory_to_image_copy.imageSubresource.layerCount = 1u; + memory_to_image_copy.imageOffset = { 0, 0, 0 }; + memory_to_image_copy.imageExtent = { image_width, image_height, 1u }; + + VkCopyMemoryToImageInfoEXT copy_memory_to_image_info; + copy_memory_to_image_info.sType = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT; + copy_memory_to_image_info.pNext = nullptr; + copy_memory_to_image_info.flags = 0u; + copy_memory_to_image_info.dstImage = src_image; + copy_memory_to_image_info.dstImageLayout = VK_IMAGE_LAYOUT_GENERAL; + copy_memory_to_image_info.regionCount = 1u; + copy_memory_to_image_info.pRegions = &memory_to_image_copy; + result = init.disp.copyMemoryToImageEXT(©_memory_to_image_info); + VERIFY_VK_RESULT("failed to copy memory to image", result); + } + else + { + VkCommandBufferBeginInfo command_buffer_begin_info; + command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; + command_buffer_begin_info.pNext = nullptr; + command_buffer_begin_info.flags = 0u; + command_buffer_begin_info.pInheritanceInfo = nullptr; + result = init.disp.beginCommandBuffer(command_buffer, &command_buffer_begin_info); + VERIFY_VK_RESULT("failed to begin command buffer", result); + + VkBufferImageCopy buffer_image_copy; + buffer_image_copy.bufferOffset = 0u; + buffer_image_copy.bufferRowLength = 0u; + buffer_image_copy.bufferImageHeight = 0u; + buffer_image_copy.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + buffer_image_copy.imageSubresource.mipLevel = 0u; + buffer_image_copy.imageSubresource.baseArrayLayer = 0u; + buffer_image_copy.imageSubresource.layerCount = 1u; + buffer_image_copy.imageOffset = { 0, 0, 0 }; + buffer_image_copy.imageExtent = { image_width, image_height, 1u }; + init.disp.cmdCopyBufferToImage( + command_buffer, src_buffer, src_image, VK_IMAGE_LAYOUT_GENERAL, 1u, &buffer_image_copy); + + result = init.disp.endCommandBuffer(command_buffer); + VERIFY_VK_RESULT("failed to end command buffer", result); + + result = init.disp.resetFences(1u, &fence); + VERIFY_VK_RESULT("failed to reset fence", result); + + VkSubmitInfo submit_info; + submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; + submit_info.pNext = nullptr; + submit_info.waitSemaphoreCount = 0u; + submit_info.pWaitSemaphores = nullptr; + submit_info.pWaitDstStageMask = nullptr; + submit_info.commandBufferCount = 1u; + submit_info.pCommandBuffers = &command_buffer; + submit_info.signalSemaphoreCount = 0u; + submit_info.pSignalSemaphores = nullptr; + result = init.disp.queueSubmit(queue, 1u, &submit_info, fence); + VERIFY_VK_RESULT("failed to submit command buffer", result); + result = init.disp.waitForFences(1u, &fence, VK_TRUE, UINT64_MAX); + VERIFY_VK_RESULT("failed to wait for fences", result); + } + + VkImageCopy2 image_copy; + image_copy.sType = VK_STRUCTURE_TYPE_IMAGE_COPY_2; + image_copy.pNext = nullptr; + image_copy.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + image_copy.srcSubresource.mipLevel = 0u; + image_copy.srcSubresource.baseArrayLayer = 0u; + image_copy.srcSubresource.layerCount = 1u; + 
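[Note: in both VkMemoryToImageCopyEXT and VkBufferImageCopy above, the row-length and image-height fields are 0, which means the source data is tightly packed: a zero row length is interpreted as the copy extent's width. For the 256x256 RGBA8 image that is exactly the 256*256*4 = 262144-byte buffer_size used by this app, and texel addressing reduces to the small worked example below.]

    #include <cstddef>
    #include <cstdint>

    // Byte offset of texel (x, y) in a tightly packed R8G8B8A8 source
    // (rowLength == 0 behaves as rowLength == extent.width).
    constexpr size_t rgba8_texel_offset(uint32_t x, uint32_t y, uint32_t row_length)
    {
        constexpr uint32_t bytes_per_texel = 4; // VK_FORMAT_R8G8B8A8_UNORM
        return (static_cast<size_t>(y) * row_length + x) * bytes_per_texel;
    }

    static_assert(rgba8_texel_offset(255, 255, 256) == 256 * 256 * 4 - 4,
                  "last texel sits at buffer_size - 4");
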
image_copy.srcOffset = { 0, 0, 0 }; + image_copy.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + image_copy.dstSubresource.mipLevel = 0u; + image_copy.dstSubresource.baseArrayLayer = 0u; + image_copy.dstSubresource.layerCount = 1u; + image_copy.dstOffset = { 0, 0, 0 }; + image_copy.extent = { image_width, image_height, 1u }; + + VkCopyImageToImageInfoEXT copy_image_to_image_info; + copy_image_to_image_info.sType = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO_EXT; + copy_image_to_image_info.pNext = nullptr; + copy_image_to_image_info.flags = 0u; + copy_image_to_image_info.srcImage = src_image; + copy_image_to_image_info.srcImageLayout = VK_IMAGE_LAYOUT_GENERAL; + copy_image_to_image_info.dstImage = dst_image; + copy_image_to_image_info.dstImageLayout = VK_IMAGE_LAYOUT_GENERAL; + copy_image_to_image_info.regionCount = 1u; + copy_image_to_image_info.pRegions = &image_copy; + result = init.disp.copyImageToImageEXT(©_image_to_image_info); + VERIFY_VK_RESULT("failed to copy image to image", result); + + if (memory_to_image) + { + VkImageToMemoryCopyEXT image_to_memory_copy; + image_to_memory_copy.sType = VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY_EXT; + image_to_memory_copy.pNext = nullptr; + image_to_memory_copy.pHostPointer = dst_data; + image_to_memory_copy.memoryRowLength = 0u; + image_to_memory_copy.memoryImageHeight = 0u; + image_to_memory_copy.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + image_to_memory_copy.imageSubresource.mipLevel = 0u; + image_to_memory_copy.imageSubresource.baseArrayLayer = 0u; + image_to_memory_copy.imageSubresource.layerCount = 1u; + image_to_memory_copy.imageOffset = { 0, 0, 0 }; + image_to_memory_copy.imageExtent = { image_width, image_height, 1u }; + + VkCopyImageToMemoryInfoEXT copy_image_to_memory_info; + copy_image_to_memory_info.sType = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO_EXT; + copy_image_to_memory_info.pNext = nullptr; + copy_image_to_memory_info.flags = 0u; + copy_image_to_memory_info.srcImage = dst_image; + copy_image_to_memory_info.srcImageLayout = VK_IMAGE_LAYOUT_GENERAL; + copy_image_to_memory_info.regionCount = 1u; + copy_image_to_memory_info.pRegions = &image_to_memory_copy; + init.disp.copyImageToMemoryEXT(©_image_to_memory_info); + } + else + { + VkCommandBufferBeginInfo command_buffer_begin_info; + command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; + command_buffer_begin_info.pNext = nullptr; + command_buffer_begin_info.flags = 0u; + command_buffer_begin_info.pInheritanceInfo = nullptr; + result = init.disp.beginCommandBuffer(command_buffer, &command_buffer_begin_info); + VERIFY_VK_RESULT("failed to begin command buffer", result); + + VkBufferImageCopy buffer_image_copy; + buffer_image_copy.bufferOffset = 0u; + buffer_image_copy.bufferRowLength = 0u; + buffer_image_copy.bufferImageHeight = 0u; + buffer_image_copy.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + buffer_image_copy.imageSubresource.mipLevel = 0u; + buffer_image_copy.imageSubresource.baseArrayLayer = 0u; + buffer_image_copy.imageSubresource.layerCount = 1u; + buffer_image_copy.imageOffset = { 0, 0, 0 }; + buffer_image_copy.imageExtent = { image_width, image_height, 1u }; + init.disp.cmdCopyImageToBuffer( + command_buffer, dst_image, VK_IMAGE_LAYOUT_GENERAL, dst_buffer, 1u, &buffer_image_copy); + + result = init.disp.endCommandBuffer(command_buffer); + VERIFY_VK_RESULT("failed to end command buffer", result); + + result = init.disp.resetFences(1u, &fence); + VERIFY_VK_RESULT("failed to reset fence", result); + + 
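[Note: frame() records, submits and then waits on a fence once for each direction of the staging-buffer path. A hypothetical helper (names are illustrative, not part of this patch) that the repeated sequence could be factored into; it expects the fence to be unsignaled on entry and leaves it unsignaled on return:]

    #include <vulkan/vulkan.h>
    #include <cstdint>
    #include <stdexcept>

    // Hypothetical helper: submit one pre-recorded command buffer and block
    // until its fence signals, then reset the fence for reuse.
    void submit_and_wait(VkDevice device, VkQueue queue, VkCommandBuffer command_buffer, VkFence fence)
    {
        VkSubmitInfo submit_info{};
        submit_info.sType              = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submit_info.commandBufferCount = 1;
        submit_info.pCommandBuffers    = &command_buffer;

        if (vkQueueSubmit(queue, 1, &submit_info, fence) != VK_SUCCESS)
            throw std::runtime_error("vkQueueSubmit failed");
        if (vkWaitForFences(device, 1, &fence, VK_TRUE, UINT64_MAX) != VK_SUCCESS)
            throw std::runtime_error("vkWaitForFences failed");
        vkResetFences(device, 1, &fence);
    }
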
VkSubmitInfo submit_info; + submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; + submit_info.pNext = nullptr; + submit_info.waitSemaphoreCount = 0u; + submit_info.pWaitSemaphores = nullptr; + submit_info.pWaitDstStageMask = nullptr; + submit_info.commandBufferCount = 1u; + submit_info.pCommandBuffers = &command_buffer; + submit_info.signalSemaphoreCount = 0u; + submit_info.pSignalSemaphores = nullptr; + result = init.disp.queueSubmit(queue, 1u, &submit_info, fence); + VERIFY_VK_RESULT("failed to submit command buffer", result); + result = init.disp.waitForFences(1u, &fence, VK_TRUE, UINT64_MAX); + VERIFY_VK_RESULT("failed to wait for fence", result); + } + + if (memcmp(src_data, dst_data, buffer_size) != 0) + { + throw std::runtime_error("memory does not match"); + } + + init.disp.unmapMemory(src_buffer_memory); + init.disp.unmapMemory(dst_buffer_memory); + + return frame_num < 1; +} + +void App::cleanup() +{ + init.disp.destroyBuffer(src_buffer, nullptr); + init.disp.freeMemory(src_buffer_memory, nullptr); + init.disp.destroyBuffer(dst_buffer, nullptr); + init.disp.freeMemory(dst_buffer_memory, nullptr); + init.disp.destroyImage(src_image, nullptr); + init.disp.freeMemory(src_image_memory, nullptr); + init.disp.destroyImage(dst_image, nullptr); + init.disp.freeMemory(dst_image_memory, nullptr); + + init.disp.destroyCommandPool(command_pool, nullptr); + init.disp.destroyFence(fence, nullptr); +} + +void App::setup() +{ + auto transfer_queue = init.device.get_queue(gfxrecon::test::QueueType::transfer); + if (!transfer_queue.has_value()) + throw std::runtime_error("could not get graphics queue"); + this->queue = *transfer_queue; + this->queue_index = init.device.get_queue_index(gfxrecon::test::QueueType::transfer).value(); + + create_buffers_and_images(); + allocate_command_buffer(); + + VkFenceCreateInfo fence_create_info; + fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO; + fence_create_info.pNext = nullptr; + fence_create_info.flags = 0u; + init.disp.createFence(&fence_create_info, nullptr, &fence); +} + +GFXRECON_END_NAMESPACE(host_image_copy) + +GFXRECON_END_NAMESPACE(test_app) + +GFXRECON_END_NAMESPACE(gfxrecon) + +int main(int argc, char* argv[]) +{ + try + { + gfxrecon::test_app::host_image_copy::App app{}; + app.run("pipeline binaries"); + return 0; + } + catch (std::exception e) + { + std::cout << e.what() << std::endl; + return -1; + } +} diff --git a/test/test_apps/pipeline-binaries/CMakeLists.txt b/test/test_apps/pipeline-binaries/CMakeLists.txt new file mode 100644 index 0000000000..a5c983c0b5 --- /dev/null +++ b/test/test_apps/pipeline-binaries/CMakeLists.txt @@ -0,0 +1,76 @@ +############################################################################### +# Copyright (c) 2018-2020 LunarG, Inc. +# Copyright (c) 2020-2023 Advanced Micro Devices, Inc. +# All rights reserved +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
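[Note on the main() of the host-image-copy app above: it catches std::exception by value, which slices derived types such as the framework's vulkan_exception and copies the object, and the title passed to run() still says "pipeline binaries", which looks like a copy-paste leftover from the other test app. A small sketch of the usual catch-by-const-reference form, with a stub App standing in for the real class so the snippet is self-contained:]

    #include <exception>
    #include <iostream>
    #include <stdexcept>

    // Stand-in for the test apps' App classes, only so this sketch compiles.
    struct App
    {
        void run(const char*) { throw std::runtime_error("example failure"); }
    };

    int main()
    {
        try
        {
            App app{};
            app.run("host image copy");
            return 0;
        }
        catch (const std::exception& e) // const reference: no slicing, no copy
        {
            std::cout << e.what() << std::endl;
            return -1;
        }
    }
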
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# Author: LunarG Team +# Author: AMD Developer Tools Team +# Description: CMake script for pipeline-binaries test app +############################################################################### + +add_executable(gfxrecon-testapp-pipeline-binaries "") + +target_sources(gfxrecon-testapp-pipeline-binaries + PRIVATE + ${CMAKE_CURRENT_LIST_DIR}/app.cpp + ${CMAKE_CURRENT_LIST_DIR}/../common/test_app_base.cpp) + +target_include_directories(gfxrecon-testapp-pipeline-binaries PUBLIC + ${CMAKE_BINARY_DIR} + ${CMAKE_CURRENT_LIST_DIR}/../common) + +target_link_libraries(gfxrecon-testapp-pipeline-binaries + gfxrecon_application + gfxrecon_decode + gfxrecon_graphics + gfxrecon_format + gfxrecon_util + SDL3::SDL3 + platform_specific) + +if (MSVC) + # Force inclusion of "gfxrecon_disable_popup_result" variable in linking. + # On 32-bit windows, MSVC prefixes symbols with "_" but on 64-bit windows it doesn't. + if(CMAKE_SIZEOF_VOID_P EQUAL 4) + target_link_options(gfxrecon-replay PUBLIC "LINKER:/Include:_gfxrecon_disable_popup_result") + else() + target_link_options(gfxrecon-replay PUBLIC "LINKER:/Include:gfxrecon_disable_popup_result") + endif() +endif() + +common_build_directives(gfxrecon-testapp-pipeline-binaries) + +add_custom_command( + TARGET gfxrecon-testapp-pipeline-binaries + POST_BUILD + COMMAND + ${CMAKE_COMMAND} -E copy_directory + ${CMAKE_CURRENT_LIST_DIR}/shaders ${CMAKE_CURRENT_BINARY_DIR} + DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_BUILD_TYPE}) +if (WIN32) +add_custom_command(TARGET gfxrecon-testapp-pipeline-binaries POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy_if_different $ $ + COMMAND_EXPAND_LISTS) +endif () + +install(TARGETS gfxrecon-testapp-pipeline-binaries RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}) + +add_dependencies(gfxrecon-testapps gfxrecon-testapp-pipeline-binaries) \ No newline at end of file diff --git a/test/test_apps/pipeline-binaries/app.cpp b/test/test_apps/pipeline-binaries/app.cpp new file mode 100644 index 0000000000..63d470055a --- /dev/null +++ b/test/test_apps/pipeline-binaries/app.cpp @@ -0,0 +1,319 @@ +/* +** Copyright (c) 2018-2023 Valve Corporation +** Copyright (c) 2018-2024 LunarG, Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and associated documentation files (the "Software"), +** to deal in the Software without restriction, including without limitation +** the rights to use, copy, modify, merge, publish, distribute, sublicense, +** and/or sell copies of the Software, and to permit persons to whom the +** Software is furnished to do so, subject to the following conditions: +** +** The above copyright notice and this permission notice shall be included in +** all copies or substantial portions of the Software. +** +** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +** IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +** AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +** FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +** DEALINGS IN THE SOFTWARE. +*/ + +#include + +#include + +#include + +#include + +GFXRECON_BEGIN_NAMESPACE(gfxrecon) + +GFXRECON_BEGIN_NAMESPACE(test_app) + +GFXRECON_BEGIN_NAMESPACE(pipeline_binaries) + +class App : public gfxrecon::test::TestAppBase +{ + public: + App() = default; + + private: + VkPhysicalDeviceDynamicRenderingFeatures dynamic_rendering_features; + VkPhysicalDevicePipelineBinaryFeaturesKHR pipeline_binary_features; + + VkQueue graphics_queue; + + VkPipelineLayout pipeline_layout; + VkPipeline graphics_pipeline; + + void configure_instance_builder(test::InstanceBuilder& instance_builder); + void configure_physical_device_selector(test::PhysicalDeviceSelector& phys_device_selector); + + void configure_device_builder(test::DeviceBuilder& device_builder, + test::PhysicalDevice const& physical_device) override; + void create_graphics_pipeline(); + void cleanup() override; + bool frame(const int frame_num) override; + void setup() override; +}; + +void App::configure_instance_builder(test::InstanceBuilder& instance_builder) +{ + instance_builder.require_api_version(VK_API_VERSION_1_1); +} + +void App::configure_physical_device_selector(test::PhysicalDeviceSelector& phys_device_selector) +{ + phys_device_selector.add_required_extension("VK_KHR_multiview"); + phys_device_selector.add_required_extension("VK_KHR_maintenance2"); + phys_device_selector.add_required_extension("VK_KHR_create_renderpass2"); + phys_device_selector.add_required_extension("VK_KHR_depth_stencil_resolve"); + phys_device_selector.add_required_extension("VK_KHR_dynamic_rendering"); + phys_device_selector.add_required_extension("VK_KHR_maintenance5"); + phys_device_selector.add_required_extension("VK_KHR_pipeline_binary"); +} + +void App::configure_device_builder(test::DeviceBuilder& device_builder, test::PhysicalDevice const& physical_device) +{ + dynamic_rendering_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES; + dynamic_rendering_features.pNext = nullptr; + dynamic_rendering_features.dynamicRendering = VK_TRUE; + device_builder.add_pNext(&dynamic_rendering_features); + + pipeline_binary_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_FEATURES_KHR; + pipeline_binary_features.pNext = nullptr; + pipeline_binary_features.pipelineBinaries = VK_TRUE; + device_builder.add_pNext(&pipeline_binary_features); +} + +void App::create_graphics_pipeline() +{ + auto vert_module = gfxrecon::test::readShaderFromFile(init.disp, "vert.spv"); + auto frag_module = gfxrecon::test::readShaderFromFile(init.disp, "frag.spv"); + + VkPipelineShaderStageCreateInfo vert_stage_info = {}; + vert_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; + vert_stage_info.stage = VK_SHADER_STAGE_VERTEX_BIT; + vert_stage_info.module = vert_module; + vert_stage_info.pName = "main"; + + VkPipelineShaderStageCreateInfo frag_stage_info = {}; + frag_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; + frag_stage_info.stage = VK_SHADER_STAGE_FRAGMENT_BIT; + frag_stage_info.module = frag_module; + frag_stage_info.pName = "main"; + + VkPipelineShaderStageCreateInfo shader_stages[] = { vert_stage_info, frag_stage_info }; + + VkPipelineVertexInputStateCreateInfo vertex_input_info = {}; + vertex_input_info.sType = 
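[Note: readShaderFromFile used in create_graphics_pipeline comes from the shared test framework and is not shown in this patch; a typical implementation reads the .spv bytes and wraps them in a VkShaderModule. A minimal sketch under that assumption (the helper's real signature and behavior may differ):]

    #include <vulkan/vulkan.h>
    #include <fstream>
    #include <stdexcept>
    #include <vector>

    // Reads a SPIR-V binary and creates a shader module from it. codeSize is
    // given in bytes, but pCode must point at 4-byte-aligned uint32_t words.
    VkShaderModule load_spirv_module(VkDevice device, const char* path)
    {
        std::ifstream file(path, std::ios::ate | std::ios::binary);
        if (!file.is_open())
            throw std::runtime_error("failed to open shader file");

        const size_t          size = static_cast<size_t>(file.tellg());
        std::vector<uint32_t> code((size + 3) / 4);
        file.seekg(0);
        file.read(reinterpret_cast<char*>(code.data()), static_cast<std::streamsize>(size));

        VkShaderModuleCreateInfo create_info{};
        create_info.sType    = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        create_info.codeSize = size;
        create_info.pCode    = code.data();

        VkShaderModule module = VK_NULL_HANDLE;
        if (vkCreateShaderModule(device, &create_info, nullptr, &module) != VK_SUCCESS)
            throw std::runtime_error("failed to create shader module");
        return module;
    }
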
VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO; + vertex_input_info.vertexBindingDescriptionCount = 0; + vertex_input_info.vertexAttributeDescriptionCount = 0; + + VkPipelineInputAssemblyStateCreateInfo input_assembly = {}; + input_assembly.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO; + input_assembly.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST; + input_assembly.primitiveRestartEnable = VK_FALSE; + + VkViewport viewport = {}; + viewport.x = 0.0f; + viewport.y = 0.0f; + viewport.width = (float)init.swapchain.extent.width; + viewport.height = (float)init.swapchain.extent.height; + viewport.minDepth = 0.0f; + viewport.maxDepth = 1.0f; + + VkRect2D scissor = {}; + scissor.offset = { 0, 0 }; + scissor.extent = init.swapchain.extent; + + VkPipelineViewportStateCreateInfo viewport_state = {}; + viewport_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO; + viewport_state.viewportCount = 1; + viewport_state.pViewports = &viewport; + viewport_state.scissorCount = 1; + viewport_state.pScissors = &scissor; + + VkPipelineRasterizationStateCreateInfo rasterizer = {}; + rasterizer.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO; + rasterizer.depthClampEnable = VK_FALSE; + rasterizer.rasterizerDiscardEnable = VK_FALSE; + rasterizer.polygonMode = VK_POLYGON_MODE_FILL; + rasterizer.lineWidth = 1.0f; + rasterizer.cullMode = VK_CULL_MODE_BACK_BIT; + rasterizer.frontFace = VK_FRONT_FACE_CLOCKWISE; + rasterizer.depthBiasEnable = VK_FALSE; + + VkPipelineMultisampleStateCreateInfo multisampling = {}; + multisampling.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO; + multisampling.sampleShadingEnable = VK_FALSE; + multisampling.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT; + + VkPipelineColorBlendAttachmentState colorBlendAttachment = {}; + colorBlendAttachment.colorWriteMask = + VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT; + colorBlendAttachment.blendEnable = VK_FALSE; + + VkPipelineColorBlendStateCreateInfo color_blending = {}; + color_blending.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO; + color_blending.logicOpEnable = VK_FALSE; + color_blending.logicOp = VK_LOGIC_OP_COPY; + color_blending.attachmentCount = 1; + color_blending.pAttachments = &colorBlendAttachment; + color_blending.blendConstants[0] = 0.0f; + color_blending.blendConstants[1] = 0.0f; + color_blending.blendConstants[2] = 0.0f; + color_blending.blendConstants[3] = 0.0f; + + VkPipelineLayoutCreateInfo pipeline_layout_info = {}; + pipeline_layout_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; + pipeline_layout_info.setLayoutCount = 0; + pipeline_layout_info.pushConstantRangeCount = 0; + + auto result = init.disp.createPipelineLayout(&pipeline_layout_info, nullptr, &this->pipeline_layout); + VERIFY_VK_RESULT("failed to create pipeline layout", result); + + std::vector dynamic_states = { VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR }; + + VkPipelineDynamicStateCreateInfo dynamic_info = {}; + dynamic_info.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO; + dynamic_info.dynamicStateCount = static_cast(dynamic_states.size()); + dynamic_info.pDynamicStates = dynamic_states.data(); + + VkPipelineCreateFlags2CreateInfoKHR pipeline_create_flags_2_create_info; + pipeline_create_flags_2_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR; + pipeline_create_flags_2_create_info.pNext = NULL; + 
pipeline_create_flags_2_create_info.flags = VK_PIPELINE_CREATE_2_CAPTURE_DATA_BIT_KHR; + + VkFormat color_format = VK_FORMAT_R8G8B8A8_UNORM; + + VkPipelineRenderingCreateInfo pipeline_rendering_create_info = {}; + pipeline_rendering_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO; + pipeline_rendering_create_info.pNext = &pipeline_create_flags_2_create_info; + pipeline_rendering_create_info.colorAttachmentCount = 1u; + pipeline_rendering_create_info.pColorAttachmentFormats = &color_format; + + VkGraphicsPipelineCreateInfo pipeline_info = {}; + pipeline_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO; + pipeline_info.pNext = &pipeline_rendering_create_info; + pipeline_info.stageCount = 2; + pipeline_info.pStages = shader_stages; + pipeline_info.pVertexInputState = &vertex_input_info; + pipeline_info.pInputAssemblyState = &input_assembly; + pipeline_info.pViewportState = &viewport_state; + pipeline_info.pRasterizationState = &rasterizer; + pipeline_info.pMultisampleState = &multisampling; + pipeline_info.pColorBlendState = &color_blending; + pipeline_info.pDynamicState = &dynamic_info; + pipeline_info.layout = this->pipeline_layout; + pipeline_info.renderPass = VK_NULL_HANDLE; + pipeline_info.subpass = 0; + pipeline_info.basePipelineHandle = VK_NULL_HANDLE; + + result = init.disp.createGraphicsPipelines(VK_NULL_HANDLE, 1, &pipeline_info, nullptr, &this->graphics_pipeline); + VERIFY_VK_RESULT("failed to create graphics pipeline", result); + + init.disp.destroyShaderModule(frag_module, nullptr); + init.disp.destroyShaderModule(vert_module, nullptr); +} + +bool App::frame(const int frame_num) +{ + VkResult result = VK_SUCCESS; + + VkPipelineBinaryCreateInfoKHR pipeline_binary_create_info; + pipeline_binary_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_BINARY_CREATE_INFO_KHR; + pipeline_binary_create_info.pNext = nullptr; + pipeline_binary_create_info.pKeysAndDataInfo = nullptr; + pipeline_binary_create_info.pipeline = graphics_pipeline; + pipeline_binary_create_info.pPipelineCreateInfo = nullptr; + + VkPipelineBinaryHandlesInfoKHR pipeline_binary_handles_info; + pipeline_binary_handles_info.sType = VK_STRUCTURE_TYPE_PIPELINE_BINARY_HANDLES_INFO_KHR; + pipeline_binary_handles_info.pNext = nullptr; + pipeline_binary_handles_info.pipelineBinaryCount = 0u; + pipeline_binary_handles_info.pPipelineBinaries = nullptr; + result = init.disp.createPipelineBinariesKHR(&pipeline_binary_create_info, nullptr, &pipeline_binary_handles_info); + VERIFY_VK_RESULT("failed to get pipeline binary count", result); + + std::vector pipeline_binaries(pipeline_binary_handles_info.pipelineBinaryCount); + pipeline_binary_handles_info.pPipelineBinaries = pipeline_binaries.data(); + result = init.disp.createPipelineBinariesKHR(&pipeline_binary_create_info, nullptr, &pipeline_binary_handles_info); + VERIFY_VK_RESULT("failed to create pipeline binaries", result); + + for (uint32_t i = 0; i < pipeline_binary_handles_info.pipelineBinaryCount; ++i) + { + VkPipelineBinaryKeyKHR pipeline_binary_key; + pipeline_binary_key.sType = VK_STRUCTURE_TYPE_PIPELINE_BINARY_KEY_KHR; + pipeline_binary_key.pNext = nullptr; + pipeline_binary_key.keySize = 0u; + + VkPipelineBinaryDataInfoKHR pipelineBinaryDataInfo; + pipelineBinaryDataInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_BINARY_DATA_INFO_KHR; + pipelineBinaryDataInfo.pNext = nullptr; + pipelineBinaryDataInfo.pipelineBinary = pipeline_binaries[i]; + + size_t pipelineBinaryDataSize; + result = init.disp.getPipelineBinaryDataKHR( + &pipelineBinaryDataInfo, 
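[Note: the binaries created above are only read back and then destroyed later in frame(); in a real application they would typically be persisted and used on a later run to skip compilation. A sketch of that reuse path as I understand VK_KHR_pipeline_binary: the structure and member names of VkPipelineBinaryInfoKHR below are recalled from the extension spec and should be checked against vulkan_core.h.]

    #include <vulkan/vulkan.h>
    #include <cstdint>
    #include <vector>

    // Sketch: chain previously created VkPipelineBinaryKHR handles into the
    // pipeline create info so the driver can build the pipeline from them
    // instead of recompiling the shaders.
    VkPipeline create_pipeline_from_binaries(VkDevice                                device,
                                             const VkGraphicsPipelineCreateInfo&     original_create_info,
                                             const std::vector<VkPipelineBinaryKHR>& binaries)
    {
        VkPipelineBinaryInfoKHR binary_info{};
        binary_info.sType             = VK_STRUCTURE_TYPE_PIPELINE_BINARY_INFO_KHR;
        binary_info.pNext             = original_create_info.pNext; // keep the existing chain
        binary_info.binaryCount       = static_cast<uint32_t>(binaries.size());
        binary_info.pPipelineBinaries = binaries.data();

        VkGraphicsPipelineCreateInfo create_info = original_create_info;
        create_info.pNext                        = &binary_info;

        VkPipeline pipeline = VK_NULL_HANDLE;
        vkCreateGraphicsPipelines(device, VK_NULL_HANDLE, 1, &create_info, nullptr, &pipeline);
        return pipeline;
    }
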
&pipeline_binary_key, &pipelineBinaryDataSize, nullptr); + VERIFY_VK_RESULT("failed to get pipeline binary data size", result); + + std::vector binary_data(pipelineBinaryDataSize); + result = init.disp.getPipelineBinaryDataKHR( + &pipelineBinaryDataInfo, &pipeline_binary_key, &pipelineBinaryDataSize, binary_data.data()); + VERIFY_VK_RESULT("failed to get pipeline binary data", result); + } + + VkReleaseCapturedPipelineDataInfoKHR release_captured_pipeline_data_info; + release_captured_pipeline_data_info.sType = VK_STRUCTURE_TYPE_RELEASE_CAPTURED_PIPELINE_DATA_INFO_KHR; + release_captured_pipeline_data_info.pNext = NULL; + release_captured_pipeline_data_info.pipeline = graphics_pipeline; + result = init.disp.releaseCapturedPipelineDataKHR(&release_captured_pipeline_data_info, nullptr); + + for (uint32_t i = 0; i < pipeline_binary_handles_info.pipelineBinaryCount; ++i) + { + init.disp.destroyPipelineBinaryKHR(pipeline_binaries[i], nullptr); + } + + return false; +} + +void App::cleanup() +{ + init.disp.destroyPipeline(this->graphics_pipeline, nullptr); + init.disp.destroyPipelineLayout(this->pipeline_layout, nullptr); +} + +void App::setup() +{ + auto graphics_queue = init.device.get_queue(gfxrecon::test::QueueType::graphics); + if (!graphics_queue.has_value()) + throw std::runtime_error("could not get graphics queue"); + this->graphics_queue = *graphics_queue; + + create_graphics_pipeline(); +} + +GFXRECON_END_NAMESPACE(pipeline_binaries) + +GFXRECON_END_NAMESPACE(test_app) + +GFXRECON_END_NAMESPACE(gfxrecon) + +int main(int argc, char* argv[]) +{ + try + { + gfxrecon::test_app::pipeline_binaries::App app{}; + app.run("pipeline binaries"); + return 0; + } + catch (std::exception e) + { + std::cout << e.what() << std::endl; + return -1; + } +} diff --git a/test/test_apps/pipeline-binaries/shaders/frag.spv b/test/test_apps/pipeline-binaries/shaders/frag.spv new file mode 100644 index 0000000000000000000000000000000000000000..1331eb4cda17af9c3384bd5c049b00369d68a009 GIT binary patch literal 608 zcmYk2-Acni5QWF4X=7{uEa;t7ycCKTDuPr|k=z98&jTzmSv3-qkfefleKw!U8^Lqd zM6xiMowMKWoQc&s=!$HJBLkVr-i9y>RhEyZ#pOoGCeZxa_M@v zD#$Wbl%KIyag=BkMmebHLz8nFT$BXyDr^Eah^9ANY~wFol{aae=CB0CHh!OT|D_gP z+~OR21h80hrn;VD8qTwCCKI#Y!M+1+hS&wqxmmOS3||MCe~$WjRkH^*@;zkotjYJ0 z@w<#po;5vCJ48)(h!9y{o?-_2VJ|Me@eRdBUO?sB0|f8iL*?C3r6K)m<4R+XnwwM_ b-@k)<`rk(IK6#$g#|64_9-jTcrd2B5XJ94y1WEL1biSCP<((ZDi9N+2EkO5O+ZNa+SJ{OHMP5Gi%~!MpPBeq z`NhQZYkN0)H}b+^)X{=X537hJyWhZQx{{*lxY>->zsDlS>9{!>~5o& zF^xiqW>%G~rtE7FCB$p84cQY}L%+KGABmf0!gw8j|KyFIg@Ygs^3cyd2fZ-$`yaoA z-8@4xiN-rR@;Yz*?qJ|&`HwjC<0uLJUX)4|eGAczkLaSL8(;KB1&$P>JvqiPm(<6g zpGA4pPqdYordg8j_Tzpk!qL30@OkVXhv`|E=HYiiu_aGTuKI-7u|m%|6nvEAr_oP! zM@^YycI6J@XmA*1d60C&VUANIcM-?KoTKyN{8;qEIZ9JD^FHa(5wmc%pVHv`fZi3q z6X(jarb;*D=noEWb@T;C50)c_%DTzPb!82TXhu)^%WB4f=F?fe1mvVQA^&H zEva6Qtia?Jgn2ctjI~+e3B}Ye3s1^Z1Lq2}H{XMQQDN>Aj(YsGjAijm34d?tZ1~o! z3=O>1Tvn|0+G^$m;4sTwQA|zAwTd3*h4~(O^2~Tuo<8gh3yR_F42xABo*BTrztI`4 z2^;fE{j`eO>q2n!jamLig>BCAH%GMO;Ms3k#ecfg(2WZc1RdGc^8 zid=bHLk{C^R^^%5l`$81O$NtZugkl#nv8wGcVsN;@5+;dryjgfapdnSrVs8A+?MCt z^a9?lFlS9YnD>c>{4;s#$?xec@CKQgh32mqNB)%%Z;HDm=59E*mr~Q)JsExNOHEG? 
zWa#11&>#N~_z%*s=Vryp4Y{p~W9DO3F~=hrT5{YocgH@=`dG$!a%S`eMsNFis@V3$ YtUEG#heyNQyE1s{IUDdlo%(axfA`;bD*ylh literal 0 HcmV?d00001 From 8cc5d1618a4161155735878d30e52a5c27954651 Mon Sep 17 00:00:00 2001 From: ziga-lunarg Date: Sat, 2 Nov 2024 23:40:28 +0100 Subject: [PATCH 37/70] Add test for shader objects --- test/test_apps/CMakeLists.txt | 1 + test/test_apps/shader-objects/CMakeLists.txt | 76 +++ test/test_apps/shader-objects/app.cpp | 484 ++++++++++++++++++ .../test_apps/shader-objects/shaders/frag.spv | Bin 0 -> 608 bytes .../shader-objects/shaders/green.fspv | Bin 0 -> 420 bytes .../shader-objects/shaders/green.gspv | Bin 0 -> 1484 bytes .../shader-objects/shaders/green.tcspv | Bin 0 -> 1844 bytes .../shader-objects/shaders/green.tespv | Bin 0 -> 2092 bytes .../shader-objects/shaders/green.vspv | Bin 0 -> 1188 bytes .../test_apps/shader-objects/shaders/vert.spv | Bin 0 -> 1540 bytes 10 files changed, 561 insertions(+) create mode 100644 test/test_apps/shader-objects/CMakeLists.txt create mode 100644 test/test_apps/shader-objects/app.cpp create mode 100644 test/test_apps/shader-objects/shaders/frag.spv create mode 100644 test/test_apps/shader-objects/shaders/green.fspv create mode 100644 test/test_apps/shader-objects/shaders/green.gspv create mode 100644 test/test_apps/shader-objects/shaders/green.tcspv create mode 100644 test/test_apps/shader-objects/shaders/green.tespv create mode 100644 test/test_apps/shader-objects/shaders/green.vspv create mode 100644 test/test_apps/shader-objects/shaders/vert.spv diff --git a/test/test_apps/CMakeLists.txt b/test/test_apps/CMakeLists.txt index ce838135dd..e5e44fe9a2 100644 --- a/test/test_apps/CMakeLists.txt +++ b/test/test_apps/CMakeLists.txt @@ -32,3 +32,4 @@ add_subdirectory(triangle) add_subdirectory(multisample-depth) add_subdirectory(pipeline-binaries) add_subdirectory(host-image-copy) +add_subdirectory(shader-objects) diff --git a/test/test_apps/shader-objects/CMakeLists.txt b/test/test_apps/shader-objects/CMakeLists.txt new file mode 100644 index 0000000000..fff7bcdae4 --- /dev/null +++ b/test/test_apps/shader-objects/CMakeLists.txt @@ -0,0 +1,76 @@ +############################################################################### +# Copyright (c) 2018-2020 LunarG, Inc. +# Copyright (c) 2020-2023 Advanced Micro Devices, Inc. +# All rights reserved +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+# +# Author: LunarG Team +# Author: AMD Developer Tools Team +# Description: CMake script for shader-objects test app +############################################################################### + +add_executable(gfxrecon-testapp-shader-objects "") + +target_sources(gfxrecon-testapp-shader-objects + PRIVATE + ${CMAKE_CURRENT_LIST_DIR}/app.cpp + ${CMAKE_CURRENT_LIST_DIR}/../common/test_app_base.cpp) + +target_include_directories(gfxrecon-testapp-shader-objects PUBLIC + ${CMAKE_BINARY_DIR} + ${CMAKE_CURRENT_LIST_DIR}/../common) + +target_link_libraries(gfxrecon-testapp-shader-objects + gfxrecon_application + gfxrecon_decode + gfxrecon_graphics + gfxrecon_format + gfxrecon_util + SDL3::SDL3 + platform_specific) + +if (MSVC) + # Force inclusion of "gfxrecon_disable_popup_result" variable in linking. + # On 32-bit windows, MSVC prefixes symbols with "_" but on 64-bit windows it doesn't. + if (CMAKE_SIZEOF_VOID_P EQUAL 4) + target_link_options(gfxrecon-replay PUBLIC "LINKER:/Include:_gfxrecon_disable_popup_result") + else () + target_link_options(gfxrecon-replay PUBLIC "LINKER:/Include:gfxrecon_disable_popup_result") + endif () +endif () + +common_build_directives(gfxrecon-testapp-shader-objects) + +add_custom_command( + TARGET gfxrecon-testapp-shader-objects + POST_BUILD + COMMAND + ${CMAKE_COMMAND} -E copy_directory + ${CMAKE_CURRENT_LIST_DIR}/shaders ${CMAKE_CURRENT_BINARY_DIR} + DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_BUILD_TYPE}) +if (WIN32) + add_custom_command(TARGET gfxrecon-testapp-shader-objects POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy_if_different $ $ + COMMAND_EXPAND_LISTS) +endif () + +install(TARGETS gfxrecon-testapp-shader-objects RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}) + +add_dependencies(gfxrecon-testapps gfxrecon-testapp-shader-objects) \ No newline at end of file diff --git a/test/test_apps/shader-objects/app.cpp b/test/test_apps/shader-objects/app.cpp new file mode 100644 index 0000000000..0db265a381 --- /dev/null +++ b/test/test_apps/shader-objects/app.cpp @@ -0,0 +1,484 @@ +/* +** Copyright (c) 2018-2023 Valve Corporation +** Copyright (c) 2018-2024 LunarG, Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and associated documentation files (the "Software"), +** to deal in the Software without restriction, including without limitation +** the rights to use, copy, modify, merge, publish, distribute, sublicense, +** and/or sell copies of the Software, and to permit persons to whom the +** Software is furnished to do so, subject to the following conditions: +** +** The above copyright notice and this permission notice shall be included in +** all copies or substantial portions of the Software. +** +** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +** IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +** AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +** FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +** DEALINGS IN THE SOFTWARE. 
+*/ + +#include + +#include + +#include + +#include + +GFXRECON_BEGIN_NAMESPACE(gfxrecon) + +GFXRECON_BEGIN_NAMESPACE(test_app) + +GFXRECON_BEGIN_NAMESPACE(shader_objects) + +const int MAX_FRAMES_IN_FLIGHT = 2; + +class App : public gfxrecon::test::TestAppBase +{ + public: + App() = default; + + private: + VkQueue graphics_queue; + VkQueue present_queue; + + VkCommandPool command_pool; + VkCommandBuffer command_buffers[MAX_FRAMES_IN_FLIGHT]; + + VkShaderEXT shaders[5]; + + size_t current_frame = 0; + + gfxrecon::test::Sync sync; + + VkPhysicalDeviceDynamicRenderingFeatures dynamic_rendering_features; + VkPhysicalDeviceShaderObjectFeaturesEXT shader_object_features; + VkPhysicalDeviceFeatures2 features2; + + void configure_physical_device_selector(test::PhysicalDeviceSelector& phys_device_selector); + + void configure_device_builder(test::DeviceBuilder& device_builder, + test::PhysicalDevice const& physical_device) override; + + void create_shader_objects(); + void cleanup() override; + bool frame(const int frame_num) override; + void setup() override; +}; + +void App::configure_physical_device_selector(test::PhysicalDeviceSelector& phys_device_selector) +{ + phys_device_selector.add_required_extension("VK_KHR_multiview"); + phys_device_selector.add_required_extension("VK_KHR_maintenance2"); + phys_device_selector.add_required_extension("VK_KHR_create_renderpass2"); + phys_device_selector.add_required_extension("VK_KHR_depth_stencil_resolve"); + phys_device_selector.add_required_extension("VK_KHR_dynamic_rendering"); + phys_device_selector.add_required_extension("VK_EXT_shader_object"); +} + +void App::configure_device_builder(test::DeviceBuilder& device_builder, test::PhysicalDevice const& physical_device) +{ + dynamic_rendering_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES; + dynamic_rendering_features.pNext = nullptr; + dynamic_rendering_features.dynamicRendering = VK_TRUE; + device_builder.add_pNext(&dynamic_rendering_features); + + shader_object_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT; + shader_object_features.pNext = nullptr; + shader_object_features.shaderObject = VK_TRUE; + device_builder.add_pNext(&shader_object_features); + + features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2; + features2.pNext = nullptr; + features2.features.tessellationShader = VK_TRUE; + features2.features.geometryShader = VK_TRUE; + device_builder.add_pNext(&features2); +} + +void App::create_shader_objects() +{ + auto vert_shader = gfxrecon::test::readFile("green.vspv"); + auto tesc_shader = gfxrecon::test::readFile("green.tcspv"); + auto tese_shader = gfxrecon::test::readFile("green.tespv"); + auto geom_shader = gfxrecon::test::readFile("green.gspv"); + auto frag_shader = gfxrecon::test::readFile("green.fspv"); + + VkShaderCreateInfoEXT shader_create_infos[5]; + shader_create_infos[0].sType = VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT; + shader_create_infos[0].pNext = nullptr; + shader_create_infos[0].flags = VK_SHADER_CREATE_LINK_STAGE_BIT_EXT; + shader_create_infos[0].stage = VK_SHADER_STAGE_VERTEX_BIT; + shader_create_infos[0].nextStage = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT; + shader_create_infos[0].codeType = VK_SHADER_CODE_TYPE_SPIRV_EXT; + shader_create_infos[0].codeSize = vert_shader.size(); + shader_create_infos[0].pCode = vert_shader.data(); + shader_create_infos[0].pName = "main"; + shader_create_infos[0].setLayoutCount = 0u; + shader_create_infos[0].pSetLayouts = nullptr; + 
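[Note: create_shader_objects always compiles the five linked stages from SPIR-V (VK_SHADER_CODE_TYPE_SPIRV_EXT). VK_EXT_shader_object also lets an application read back an opaque device binary from a compiled shader and feed it back later with codeType = VK_SHADER_CODE_TYPE_BINARY_EXT; a hedged sketch of the retrieval half, which this test does not exercise (the entry point is passed in explicitly since it must be loaded via vkGetDeviceProcAddr or a dispatch table):]

    #include <vulkan/vulkan.h>
    #include <cstdint>
    #include <vector>

    // Retrieves the implementation-specific binary of a compiled VkShaderEXT.
    // The blob is only valid for a compatible device/driver and can be passed
    // back through VkShaderCreateInfoEXT with VK_SHADER_CODE_TYPE_BINARY_EXT.
    std::vector<uint8_t> get_shader_binary(PFN_vkGetShaderBinaryDataEXT get_binary_data,
                                           VkDevice                     device,
                                           VkShaderEXT                  shader)
    {
        size_t size = 0;
        get_binary_data(device, shader, &size, nullptr); // first call: query size

        std::vector<uint8_t> data(size);
        get_binary_data(device, shader, &size, data.data()); // second call: fetch bytes
        return data;
    }
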
shader_create_infos[0].pushConstantRangeCount = 0u; + shader_create_infos[0].pPushConstantRanges = nullptr; + shader_create_infos[0].pSpecializationInfo = nullptr; + + shader_create_infos[1].sType = VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT; + shader_create_infos[1].pNext = nullptr; + shader_create_infos[1].flags = VK_SHADER_CREATE_LINK_STAGE_BIT_EXT; + shader_create_infos[1].stage = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT; + shader_create_infos[1].nextStage = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT; + shader_create_infos[1].codeType = VK_SHADER_CODE_TYPE_SPIRV_EXT; + shader_create_infos[1].codeSize = tesc_shader.size(); + shader_create_infos[1].pCode = tesc_shader.data(); + shader_create_infos[1].pName = "main"; + shader_create_infos[1].setLayoutCount = 0u; + shader_create_infos[1].pSetLayouts = nullptr; + shader_create_infos[1].pushConstantRangeCount = 0u; + shader_create_infos[1].pPushConstantRanges = nullptr; + shader_create_infos[1].pSpecializationInfo = nullptr; + + shader_create_infos[2].sType = VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT; + shader_create_infos[2].pNext = nullptr; + shader_create_infos[2].flags = VK_SHADER_CREATE_LINK_STAGE_BIT_EXT; + shader_create_infos[2].stage = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT; + shader_create_infos[2].nextStage = VK_SHADER_STAGE_GEOMETRY_BIT; + shader_create_infos[2].codeType = VK_SHADER_CODE_TYPE_SPIRV_EXT; + shader_create_infos[2].codeSize = tese_shader.size(); + shader_create_infos[2].pCode = tese_shader.data(); + shader_create_infos[2].pName = "main"; + shader_create_infos[2].setLayoutCount = 0u; + shader_create_infos[2].pSetLayouts = nullptr; + shader_create_infos[2].pushConstantRangeCount = 0u; + shader_create_infos[2].pPushConstantRanges = nullptr; + shader_create_infos[2].pSpecializationInfo = nullptr; + + shader_create_infos[3].sType = VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT; + shader_create_infos[3].pNext = nullptr; + shader_create_infos[3].flags = VK_SHADER_CREATE_LINK_STAGE_BIT_EXT; + shader_create_infos[3].stage = VK_SHADER_STAGE_GEOMETRY_BIT; + shader_create_infos[3].nextStage = VK_SHADER_STAGE_FRAGMENT_BIT; + shader_create_infos[3].codeType = VK_SHADER_CODE_TYPE_SPIRV_EXT; + shader_create_infos[3].codeSize = geom_shader.size(); + shader_create_infos[3].pCode = geom_shader.data(); + shader_create_infos[3].pName = "main"; + shader_create_infos[3].setLayoutCount = 0u; + shader_create_infos[3].pSetLayouts = nullptr; + shader_create_infos[3].pushConstantRangeCount = 0u; + shader_create_infos[3].pPushConstantRanges = nullptr; + shader_create_infos[3].pSpecializationInfo = nullptr; + + shader_create_infos[4].sType = VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT; + shader_create_infos[4].pNext = nullptr; + shader_create_infos[4].flags = VK_SHADER_CREATE_LINK_STAGE_BIT_EXT; + shader_create_infos[4].stage = VK_SHADER_STAGE_FRAGMENT_BIT; + shader_create_infos[4].nextStage = 0u; + shader_create_infos[4].codeType = VK_SHADER_CODE_TYPE_SPIRV_EXT; + shader_create_infos[4].codeSize = frag_shader.size(); + shader_create_infos[4].pCode = frag_shader.data(); + shader_create_infos[4].pName = "main"; + shader_create_infos[4].setLayoutCount = 0u; + shader_create_infos[4].pSetLayouts = nullptr; + shader_create_infos[4].pushConstantRangeCount = 0u; + shader_create_infos[4].pPushConstantRanges = nullptr; + shader_create_infos[4].pSpecializationInfo = nullptr; + init.disp.createShadersEXT(5u, shader_create_infos, nullptr, shaders); +} + +const int NUM_FRAMES = 20; +#define IS_RUNNING(frame_num) frame_num < NUM_FRAMES; + +bool App::frame(const int 
frame_num) +{ + init.disp.waitForFences(1, &this->sync.in_flight_fences[this->current_frame], VK_TRUE, UINT64_MAX); + + uint32_t image_index = 0; + VkResult result = init.disp.acquireNextImageKHR( + init.swapchain, UINT64_MAX, this->sync.available_semaphores[this->current_frame], VK_NULL_HANDLE, &image_index); + + if (result == VK_ERROR_OUT_OF_DATE_KHR) + { + recreate_swapchain(true); + return IS_RUNNING(frame_num); + } + else if (result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR) + { + throw gfxrecon::test::vulkan_exception("failed to acquire next image", result); + } + + if (this->sync.image_in_flight[image_index] != VK_NULL_HANDLE) + { + init.disp.waitForFences(1, &this->sync.image_in_flight[image_index], VK_TRUE, UINT64_MAX); + } + this->sync.image_in_flight[image_index] = this->sync.in_flight_fences[this->current_frame]; + + VkCommandBuffer command_buffer = command_buffers[this->current_frame]; + + init.disp.resetCommandBuffer(command_buffer, 0u); + + VkCommandBufferBeginInfo command_buffer_begin_info; + command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; + command_buffer_begin_info.pNext = nullptr; + command_buffer_begin_info.flags = 0u; + command_buffer_begin_info.pInheritanceInfo = nullptr; + init.disp.beginCommandBuffer(command_buffer, &command_buffer_begin_info); + + VkImageMemoryBarrier pre_image_memory_barrier; + pre_image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; + pre_image_memory_barrier.pNext = nullptr; + pre_image_memory_barrier.srcAccessMask = VK_ACCESS_NONE; + pre_image_memory_barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; + pre_image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED; + pre_image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; + pre_image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + pre_image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + pre_image_memory_barrier.image = init.swapchain_images[image_index]; + pre_image_memory_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + pre_image_memory_barrier.subresourceRange.baseMipLevel = 0u; + pre_image_memory_barrier.subresourceRange.levelCount = 1u; + pre_image_memory_barrier.subresourceRange.baseArrayLayer = 0u; + pre_image_memory_barrier.subresourceRange.layerCount = 1u; + init.disp.cmdPipelineBarrier(command_buffer, + VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, + VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, + 0u, + 0u, + nullptr, + 0u, + nullptr, + 1u, + &pre_image_memory_barrier); + + { + VkViewport viewport; + viewport.x = 0.0f; + viewport.y = 0.0f; + viewport.width = (float)init.swapchain.extent.width; + viewport.height = (float)init.swapchain.extent.height; + viewport.minDepth = 0.0f; + viewport.maxDepth = 1.0f; + init.disp.cmdSetViewportWithCountEXT(command_buffer, 1u, &viewport); + + VkRect2D scissor; + scissor.offset.x = 0; + scissor.offset.y = 0; + scissor.extent = init.swapchain.extent; + init.disp.cmdSetScissorWithCountEXT(command_buffer, 1u, &scissor); + + init.disp.cmdSetTessellationDomainOriginEXT(command_buffer, VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT); + init.disp.cmdSetPatchControlPointsEXT(command_buffer, 4u); + init.disp.cmdSetRasterizerDiscardEnableEXT(command_buffer, VK_FALSE); + init.disp.cmdSetCullModeEXT(command_buffer, VK_CULL_MODE_NONE); + init.disp.cmdSetDepthTestEnableEXT(command_buffer, VK_FALSE); + init.disp.cmdSetDepthWriteEnableEXT(command_buffer, VK_FALSE); + init.disp.cmdSetStencilTestEnableEXT(command_buffer, VK_FALSE); + 
init.disp.cmdSetDepthBiasEnableEXT(command_buffer, VK_FALSE); + init.disp.cmdSetPolygonModeEXT(command_buffer, VK_POLYGON_MODE_FILL); + init.disp.cmdSetRasterizationSamplesEXT(command_buffer, VK_SAMPLE_COUNT_1_BIT); + init.disp.cmdSetAlphaToCoverageEnableEXT(command_buffer, VK_FALSE); + init.disp.cmdSetPrimitiveTopologyEXT(command_buffer, VK_PRIMITIVE_TOPOLOGY_PATCH_LIST); + init.disp.cmdSetPrimitiveRestartEnableEXT(command_buffer, VK_FALSE); + init.disp.cmdSetVertexInputEXT(command_buffer, 0u, NULL, 0u, NULL); + + VkSampleMask sampleMask = 0xFFFFFFFF; + init.disp.cmdSetSampleMaskEXT(command_buffer, VK_SAMPLE_COUNT_1_BIT, &sampleMask); + + VkBool32 colorBlendEnable = VK_FALSE; + init.disp.cmdSetColorBlendEnableEXT(command_buffer, 0u, 1u, &colorBlendEnable); + + VkColorComponentFlags colorWriteMask = + VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT; + init.disp.cmdSetColorWriteMaskEXT(command_buffer, 0u, 1u, &colorWriteMask); + } + + VkShaderStageFlagBits stages[5] = { + VK_SHADER_STAGE_VERTEX_BIT, + VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, + VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, + VK_SHADER_STAGE_GEOMETRY_BIT, + VK_SHADER_STAGE_FRAGMENT_BIT, + }; + init.disp.cmdBindShadersEXT(command_buffer, 5u, stages, shaders); + + VkClearValue clear_value; + clear_value.color.float32[0] = 0.0f; + clear_value.color.float32[1] = 0.0f; + clear_value.color.float32[2] = 0.4f; + clear_value.color.float32[3] = 1.0f; + + VkRenderingAttachmentInfo color_attachment; + color_attachment.sType = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO; + color_attachment.pNext = nullptr; + color_attachment.imageView = init.swapchain_image_views[image_index]; + color_attachment.imageLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; + color_attachment.resolveMode = VK_RESOLVE_MODE_NONE; + color_attachment.resolveImageView = VK_NULL_HANDLE; + color_attachment.resolveImageLayout = VK_IMAGE_LAYOUT_UNDEFINED; + color_attachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; + color_attachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE; + color_attachment.clearValue = clear_value; + + VkRenderingInfo rendering_info; + rendering_info.sType = VK_STRUCTURE_TYPE_RENDERING_INFO; + rendering_info.pNext = nullptr; + rendering_info.flags = 0u; + rendering_info.renderArea.offset = { 0, 0 }; + rendering_info.renderArea.extent = init.swapchain.extent; + rendering_info.layerCount = 1u; + rendering_info.viewMask = 0x0; + rendering_info.colorAttachmentCount = 1u; + rendering_info.pColorAttachments = &color_attachment; + rendering_info.pDepthAttachment = nullptr; + rendering_info.pStencilAttachment = nullptr; + + init.disp.cmdBeginRenderingKHR(command_buffer, &rendering_info); + init.disp.cmdDraw(command_buffer, 4u, 1u, 0u, 0u); + init.disp.cmdEndRenderingKHR(command_buffer); + + VkImageMemoryBarrier post_image_memory_barrier; + post_image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; + post_image_memory_barrier.pNext = nullptr; + post_image_memory_barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; + post_image_memory_barrier.dstAccessMask = VK_ACCESS_NONE; + post_image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; + post_image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR; + post_image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + post_image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + post_image_memory_barrier.image = init.swapchain_images[image_index]; + 
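[Note: the cmdSetVertexInputEXT call above passes zero bindings and attributes because this draw sources no vertex buffers; the vertex shader generates the four patch corners itself. For reference, a sketch of what the same dynamic state looks like when a real vertex buffer is bound, assuming an interleaved vec3 position + vec3 color layout of 24 bytes per vertex (the function pointer is passed in because the EXT entry point must be loaded, as the test apps do through init.disp):]

    #include <vulkan/vulkan.h>

    // Example dynamic vertex input for an interleaved position+color buffer.
    void set_vertex_input(PFN_vkCmdSetVertexInputEXT cmd_set_vertex_input, VkCommandBuffer command_buffer)
    {
        VkVertexInputBindingDescription2EXT binding{};
        binding.sType     = VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT;
        binding.binding   = 0;
        binding.stride    = 6 * sizeof(float); // 24 bytes per vertex
        binding.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
        binding.divisor   = 1;

        VkVertexInputAttributeDescription2EXT attributes[2]{};
        attributes[0].sType    = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT;
        attributes[0].location = 0;
        attributes[0].binding  = 0;
        attributes[0].format   = VK_FORMAT_R32G32B32_SFLOAT; // position
        attributes[0].offset   = 0;
        attributes[1].sType    = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT;
        attributes[1].location = 1;
        attributes[1].binding  = 0;
        attributes[1].format   = VK_FORMAT_R32G32B32_SFLOAT; // color
        attributes[1].offset   = 3 * sizeof(float);

        cmd_set_vertex_input(command_buffer, 1, &binding, 2, attributes);
    }
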
post_image_memory_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + post_image_memory_barrier.subresourceRange.baseMipLevel = 0u; + post_image_memory_barrier.subresourceRange.levelCount = 1u; + post_image_memory_barrier.subresourceRange.baseArrayLayer = 0u; + post_image_memory_barrier.subresourceRange.layerCount = 1u; + init.disp.cmdPipelineBarrier(command_buffer, + VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, + VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, + 0u, + 0u, + nullptr, + 0u, + nullptr, + 1u, + &post_image_memory_barrier); + + init.disp.endCommandBuffer(command_buffer); + + VkSubmitInfo submitInfo = {}; + submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; + + VkSemaphore wait_semaphores[] = { this->sync.available_semaphores[this->current_frame] }; + VkPipelineStageFlags wait_stages[] = { VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT }; + submitInfo.waitSemaphoreCount = 1; + submitInfo.pWaitSemaphores = wait_semaphores; + submitInfo.pWaitDstStageMask = wait_stages; + + submitInfo.commandBufferCount = 1; + submitInfo.pCommandBuffers = &command_buffer; + + VkSemaphore signal_semaphores[] = { this->sync.finished_semaphore[this->current_frame] }; + submitInfo.signalSemaphoreCount = 1; + submitInfo.pSignalSemaphores = signal_semaphores; + + init.disp.resetFences(1, &this->sync.in_flight_fences[this->current_frame]); + + result = + init.disp.queueSubmit(this->graphics_queue, 1, &submitInfo, this->sync.in_flight_fences[this->current_frame]); + VERIFY_VK_RESULT("failed to submit queue", result); + + VkPresentInfoKHR present_info = {}; + present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; + + present_info.waitSemaphoreCount = 1; + present_info.pWaitSemaphores = signal_semaphores; + + VkSwapchainKHR swapChains[] = { init.swapchain }; + present_info.swapchainCount = 1; + present_info.pSwapchains = swapChains; + + present_info.pImageIndices = &image_index; + + result = init.disp.queuePresentKHR(this->present_queue, &present_info); + if (result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR) + { + recreate_swapchain(true); + return frame_num >= NUM_FRAMES; + } + VERIFY_VK_RESULT("failed to present queue", result); + + this->current_frame = (this->current_frame + 1) % MAX_FRAMES_IN_FLIGHT; + + return IS_RUNNING(frame_num); +} + +void App::cleanup() +{ + for (size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) + { + init.disp.destroySemaphore(this->sync.finished_semaphore[i], nullptr); + init.disp.destroySemaphore(this->sync.available_semaphores[i], nullptr); + init.disp.destroyFence(this->sync.in_flight_fences[i], nullptr); + } + + init.disp.destroyCommandPool(command_pool, nullptr); + for (uint32_t i = 0; i < 5; ++i) + { + init.disp.destroyShaderEXT(shaders[i], nullptr); + } +} + +void App::setup() +{ + auto graphics_queue = init.device.get_queue(gfxrecon::test::QueueType::graphics); + if (!graphics_queue.has_value()) + throw std::runtime_error("could not get graphics queue"); + this->graphics_queue = *graphics_queue; + + auto present_queue = init.device.get_queue(gfxrecon::test::QueueType::present); + if (!present_queue.has_value()) + throw std::runtime_error("could not get present queue"); + this->present_queue = *present_queue; + + create_shader_objects(); + + auto queue_family_index = init.device.get_queue_index(gfxrecon::test::QueueType::graphics); + if (!queue_family_index) + throw std::runtime_error("could not find graphics queue"); + + this->sync = gfxrecon::test::create_sync_objects(init.swapchain, init.disp, MAX_FRAMES_IN_FLIGHT); + + VkCommandPoolCreateInfo 
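[Note: create_sync_objects and the test::Sync struct come from the shared framework; judging from how they are used above, they hold one acquire semaphore, one render-finished semaphore and one fence per frame in flight, plus a per-swapchain-image slot recording which in-flight fence last used that image. A minimal sketch of equivalent setup under that assumption, with member names mirroring the usage in frame():]

    #include <vulkan/vulkan.h>
    #include <cstdint>
    #include <vector>

    // Assumed shape of the framework's sync bundle.
    struct FrameSync
    {
        std::vector<VkSemaphore> available_semaphores; // signaled by vkAcquireNextImageKHR
        std::vector<VkSemaphore> finished_semaphore;   // signaled by the graphics submit
        std::vector<VkFence>     in_flight_fences;     // one per frame in flight
        std::vector<VkFence>     image_in_flight;      // one per swapchain image, starts VK_NULL_HANDLE
    };

    FrameSync create_frame_sync(VkDevice device, uint32_t frames_in_flight, uint32_t swapchain_image_count)
    {
        FrameSync sync{};
        sync.available_semaphores.resize(frames_in_flight);
        sync.finished_semaphore.resize(frames_in_flight);
        sync.in_flight_fences.resize(frames_in_flight);
        sync.image_in_flight.resize(swapchain_image_count, VK_NULL_HANDLE);

        VkSemaphoreCreateInfo semaphore_info{};
        semaphore_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;

        VkFenceCreateInfo fence_info{};
        fence_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
        fence_info.flags = VK_FENCE_CREATE_SIGNALED_BIT; // first wait must not block forever

        for (uint32_t i = 0; i < frames_in_flight; ++i)
        {
            vkCreateSemaphore(device, &semaphore_info, nullptr, &sync.available_semaphores[i]);
            vkCreateSemaphore(device, &semaphore_info, nullptr, &sync.finished_semaphore[i]);
            vkCreateFence(device, &fence_info, nullptr, &sync.in_flight_fences[i]);
        }
        return sync;
    }
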
command_pool_create_info; + command_pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO; + command_pool_create_info.pNext = nullptr; + command_pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT; + command_pool_create_info.queueFamilyIndex = queue_family_index.value(); + init.disp.createCommandPool(&command_pool_create_info, nullptr, &command_pool); + + VkCommandBufferAllocateInfo command_buffer_allocate_info; + command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO; + command_buffer_allocate_info.pNext = nullptr; + command_buffer_allocate_info.commandPool = command_pool; + command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY; + command_buffer_allocate_info.commandBufferCount = MAX_FRAMES_IN_FLIGHT; + init.disp.allocateCommandBuffers(&command_buffer_allocate_info, command_buffers); +} + +GFXRECON_END_NAMESPACE(shader_objects) + +GFXRECON_END_NAMESPACE(test_app) + +GFXRECON_END_NAMESPACE(gfxrecon) + +int main(int argc, char* argv[]) +{ + try + { + gfxrecon::test_app::shader_objects::App app{}; + app.run("triangle"); + return 0; + } + catch (std::exception e) + { + std::cout << e.what() << std::endl; + return -1; + } +} diff --git a/test/test_apps/shader-objects/shaders/frag.spv b/test/test_apps/shader-objects/shaders/frag.spv new file mode 100644 index 0000000000000000000000000000000000000000..1331eb4cda17af9c3384bd5c049b00369d68a009 GIT binary patch literal 608 zcmYk2-Acni5QWF4X=7{uEa;t7ycCKTDuPr|k=z98&jTzmSv3-qkfefleKw!U8^Lqd zM6xiMowMKWoQc&s=!$HJBLkVr-i9y>RhEyZ#pOoGCeZxa_M@v zD#$Wbl%KIyag=BkMmebHLz8nFT$BXyDr^Eah^9ANY~wFol{aae=CB0CHh!OT|D_gP z+~OR21h80hrn;VD8qTwCCKI#Y!M+1+hS&wqxmmOS3||MCe~$WjRkH^*@;zkotjYJ0 z@w<#po;5vCJ48)(h!9y{o?-_2VJ|Me@eRdBUO?sB0|f8iL*?C3r6K)m<4R+XnwwM_ b-@k)<`rk(IK6#$g#|64_9-jTc`qd;tcVL+3WathcN56M54cE@#f3BpF$&%F1N%X43Z1!C z$O+>)GtPM=uJd>$GLT5dc>ZG|4iteF*sPLmvfOw1a{aub<3?O-q~{hM51IJfnTmg0 z19$LH!*EnCCW`!^!IMx_qE(uzWTUgD(fjVJEOcqALg%I}vd$a}`i4E9sj~8r_YFQA z{hj38o7H9ABC9i>qh?HT0dIJ)*NClWr~_y*a1TFkCFk52ze8lcDfPEA=MHL5;Pi$7 pNgd z7wJo(=Qo=xL(mDsz4x5E_netcQk&RmI`_cMxLNnwRcp~r=;MS9G8fXKC(>EbaM0)-C;YM&DG_=ZkKB)r%V%>WDoybha63T;%C#HaO1) zWp<-dXoZu*T@HQeFAJf~(9eAY+!y7U|0!jjsW?1~`TOte?!KwtV$<7HX(lwzXI zdNZmYDo1}^^|T_-gKa89-)6wE6ZJh(G%p`VHSZVHF|iC^%B)^rnjx!<}r&p34EpZBH~RCY+P?w7<$}Ethc8c&h%a? 
YkM*eOvGcu(c+&$jy+1nRKkWnU|4;XGH~;_u literal 0 HcmV?d00001 diff --git a/test/test_apps/shader-objects/shaders/green.tcspv b/test/test_apps/shader-objects/shaders/green.tcspv new file mode 100644 index 0000000000000000000000000000000000000000..2de206d0f07eaf68489be6aba5b4456581be20b8 GIT binary patch literal 1844 zcmZ{j`)U(G6voHqR(r3#d8swEUR#Y-n<|LXVkHIwDOCJ7gk+J0brX_JP#?+1@TI(f z;P;!IDN7|24(FWjJGVVE$xZE)jJafH&AfSG(z9%)Oit#kDT*77LK{ITyvB-u4wM3+-WfQ7z~o&i(GX{act$C_J$#ygqCbP zd$M>KCuiX|eF-@!#}353jl%v(I854cCphQ01GrHXopXF6I~Cp=CCpnik5d1?nZP-= zcV;H=Ni)Gsnh9>wOx&^h*auLvH^uu&-vU1G0e?|7KjwL_ zFJ<=};y51TTfopBBL|FmJ(j-d^!?Gte=9sTCtDlrVr`$3tqqo~y`l}$e_JRx_L_E3 zlrBrCS(dLqp0&YWIiL3mzvtp-WOG(kg`VrG$ZpCKYA!ij%I4rNOX#^N{)#m5VDwy( zW+rfY;&T^Z^dxpoV$9JI@$0hS!2L{DW&0WF=l|36yWwKN$lsJEADn*p+^_HFcf!2B z{%yx^DE^LYdh@@Ar{7%({poj48Xjy*8|IDp95{ry2hwnUu7|Rj0i3z;`CoxihdaGk z$M=6Mt}LN$MVi%nGKw}`1=lHAJ3dkZTzY}w` zN1usG|MaPMGmzLnPI z7Z#tAF_=YJ<=l|#Rkm|Q+0kKK*Q9mM1+s^-VdWCL>S5LyUlK0phZ?^XD7rrNtAzXe z;oeTAdVE}IrC*Y$lEn3>Qj42WHH}Xq@_N0zxL!@#H9w)G3|~`8J$lunEh}kXX#A8^ zK18k7R-@6Z$rWF4BbrZgJf^vE)Xw;889vF@P6=!6`>6RbYNpX=xvJRh;2FdktvHPv zb+J7e&z>_ruBQj_SM7i`ofvypY%7V6x8qiNSg%H>G46p_J4sGs=nTX&*vqVDYM<2u z&&c|Edf?B~L+m^~#Qv|RnBBGdWDg+J$#cHgfGpH1&W5_XQf%i$oM9ZA;FyhL4IF)q z>%7g*o1?aACJtu!wc*azwl#ijIKTF&6w`lNEIfEzs9ST8Sy4CWGJ~7YzAtJ9lOM?M z(@bypmE5;Ij>*52<>{BkT<&vb4Au{7W<516emUpx;m2jpeOsshq!1i$e^S41=00nt z6yfYGhZshmTbj{K9G&=q3=W;sxsMk5;@_4rs5`6qY+ciPPq8427u|i$3b=-GD>aT#l)2eV+?+7v{efMyL6k!nQulW2|4&`(Pbs!(h!HF?@sQ&0X@1U^buc zf$#H8@O{1u_+?e$u7+gXFL$`An%u>(41P@=sgG~@qr&8KKbC(*7@mCk<6CS(m>NIS zkJ@}!xqo=_(FbSw*M+HJ`8R~ghbJHXaF#zKObzqz2$ShW1up*2G-kz`FjeKIJVO1E88m3`Q7#+k+!@974?i=!b z4Q2V{z?+6GVKi*Yd=1<3iJ68SVK{1-hPT4#AjVq^HSfv-8MSyD_~iZ7U(>SeA0m^O Aj{pDw literal 0 HcmV?d00001 diff --git a/test/test_apps/shader-objects/shaders/green.vspv b/test/test_apps/shader-objects/shaders/green.vspv new file mode 100644 index 0000000000000000000000000000000000000000..b1fa9dcb8974ecb89acbf417b63409b197797e0e GIT binary patch literal 1188 zcmYk4UrQT76vfBHM5|S6t=g*9So_Zk(g#}#r4^~N76L|uzAlmsWnjBmvTM=egWyN& zLtpxV`cm-xc4ss<+|1l_&fI(F&em%iW5$e_Ni${Mn962NO^6ZJEZ3-g(%#OC-uBMB zwoo|_V1uJDc> zRFp!gn+6=ynNB}P>j9JB%~vs2OzZ!I;yWvZ7g+0-XJVc1^7< z`Jz<3ATeA^@wiqnd|vTHiC1;u$xRR7AAQVyz|c=?ZOO4MA3x9@o_bHTQjh*t^uOd| zG^=8`f6&0;)Lqfa9pI@;TzzL%9r{_7W7G|IRJa@Bjgr0NpO^8~F8N<;Pkw5@*E=u| zdc?s032A7hHZ$V8V@978Q+rCz94-|zgBg$g(JA6tk8#gT8v1+a@sMLqF*$zg3^^ak z!8jB0&U-xMT~G`*oOe+%ct?5Y@v)p*;GcCHdIzH}{F)pr=-0JxdB65Ko+(BLcPxzl sU&z6+fW6cnY((BscINp?juwtLVD5X$!OS=1yd!T%Z^Y>TE5ir*4VB1T2mk;8 literal 0 HcmV?d00001 diff --git a/test/test_apps/shader-objects/shaders/vert.spv b/test/test_apps/shader-objects/shaders/vert.spv new file mode 100644 index 0000000000000000000000000000000000000000..c4d99250a60989564248adf6aabd6d57a63b515a GIT binary patch literal 1540 zcmYk5>rd2B5XJ94y1WEL1biSCP<((ZDi9N+2EkO5O+ZNa+SJ{OHMP5Gi%~!MpPBeq z`NhQZYkN0)H}b+^)X{=X537hJyWhZQx{{*lxY>->zsDlS>9{!>~5o& zF^xiqW>%G~rtE7FCB$p84cQY}L%+KGABmf0!gw8j|KyFIg@Ygs^3cyd2fZ-$`yaoA z-8@4xiN-rR@;Yz*?qJ|&`HwjC<0uLJUX)4|eGAczkLaSL8(;KB1&$P>JvqiPm(<6g zpGA4pPqdYordg8j_Tzpk!qL30@OkVXhv`|E=HYiiu_aGTuKI-7u|m%|6nvEAr_oP! zM@^YycI6J@XmA*1d60C&VUANIcM-?KoTKyN{8;qEIZ9JD^FHa(5wmc%pVHv`fZi3q z6X(jarb;*D=noEWb@T;C50)c_%DTzPb!82TXhu)^%WB4f=F?fe1mvVQA^&H zEva6Qtia?Jgn2ctjI~+e3B}Ye3s1^Z1Lq2}H{XMQQDN>Aj(YsGjAijm34d?tZ1~o! 
z3=O>1Tvn|0+G^$m;4sTwQA|zAwTd3*h4~(O^2~Tuo<8gh3yR_F42xABo*BTrztI`4 z2^;fE{j`eO>q2n!jamLig>BCAH%GMO;Ms3k#ecfg(2WZc1RdGc^8 zid=bHLk{C^R^^%5l`$81O$NtZugkl#nv8wGcVsN;@5+;dryjgfapdnSrVs8A+?MCt z^a9?lFlS9YnD>c>{4;s#$?xec@CKQgh32mqNB)%%Z;HDm=59E*mr~Q)JsExNOHEG? zWa#11&>#N~_z%*s=Vryp4Y{p~W9DO3F~=hrT5{YocgH@=`dG$!a%S`eMsNFis@V3$ YtUEG#heyNQyE1s{IUDdlo%(axfA`;bD*ylh literal 0 HcmV?d00001 From db20e08b373dedf7366fbfc1951dbb2e136b3af5 Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Fri, 8 Nov 2024 08:05:44 -0500 Subject: [PATCH 38/70] Change max frames to size_t --- test/test_apps/multisample-depth/app.cpp | 2 +- test/test_apps/shader-objects/app.cpp | 2 +- test/test_apps/triangle/app.cpp | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/test/test_apps/multisample-depth/app.cpp b/test/test_apps/multisample-depth/app.cpp index f31f00ab72..09380c3a23 100644 --- a/test/test_apps/multisample-depth/app.cpp +++ b/test/test_apps/multisample-depth/app.cpp @@ -35,7 +35,7 @@ GFXRECON_BEGIN_NAMESPACE(test_app) GFXRECON_BEGIN_NAMESPACE(multisample_depth) -const int MAX_FRAMES_IN_FLIGHT = 2; +const size_t MAX_FRAMES_IN_FLIGHT = 2; class App : public gfxrecon::test::TestAppBase { diff --git a/test/test_apps/shader-objects/app.cpp b/test/test_apps/shader-objects/app.cpp index 0db265a381..31949ab839 100644 --- a/test/test_apps/shader-objects/app.cpp +++ b/test/test_apps/shader-objects/app.cpp @@ -35,7 +35,7 @@ GFXRECON_BEGIN_NAMESPACE(test_app) GFXRECON_BEGIN_NAMESPACE(shader_objects) -const int MAX_FRAMES_IN_FLIGHT = 2; +const size_t MAX_FRAMES_IN_FLIGHT = 2; class App : public gfxrecon::test::TestAppBase { diff --git a/test/test_apps/triangle/app.cpp b/test/test_apps/triangle/app.cpp index b230b558ef..59236180b8 100644 --- a/test/test_apps/triangle/app.cpp +++ b/test/test_apps/triangle/app.cpp @@ -34,7 +34,7 @@ GFXRECON_BEGIN_NAMESPACE(test_app) GFXRECON_BEGIN_NAMESPACE(triangle) -const int MAX_FRAMES_IN_FLIGHT = 2; +const size_t MAX_FRAMES_IN_FLIGHT = 2; class App : public gfxrecon::test::TestAppBase { From 7fff54eccb6b65bd663a6935b4380268ad9162b9 Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Fri, 8 Nov 2024 08:08:32 -0500 Subject: [PATCH 39/70] Fix formatting in pipeline binaries test app --- test/test_apps/pipeline-binaries/app.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/test_apps/pipeline-binaries/app.cpp b/test/test_apps/pipeline-binaries/app.cpp index 63d470055a..499eb58e36 100644 --- a/test/test_apps/pipeline-binaries/app.cpp +++ b/test/test_apps/pipeline-binaries/app.cpp @@ -268,8 +268,8 @@ bool App::frame(const int frame_num) } VkReleaseCapturedPipelineDataInfoKHR release_captured_pipeline_data_info; - release_captured_pipeline_data_info.sType = VK_STRUCTURE_TYPE_RELEASE_CAPTURED_PIPELINE_DATA_INFO_KHR; - release_captured_pipeline_data_info.pNext = NULL; + release_captured_pipeline_data_info.sType = VK_STRUCTURE_TYPE_RELEASE_CAPTURED_PIPELINE_DATA_INFO_KHR; + release_captured_pipeline_data_info.pNext = NULL; release_captured_pipeline_data_info.pipeline = graphics_pipeline; result = init.disp.releaseCapturedPipelineDataKHR(&release_captured_pipeline_data_info, nullptr); From a2b2c8e46b6e25b3cafa1a75316c89b83d4cb9ce Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Fri, 8 Nov 2024 08:17:42 -0500 Subject: [PATCH 40/70] Mark configure methods as override in pipeline-binaries test app --- test/test_apps/pipeline-binaries/app.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/test_apps/pipeline-binaries/app.cpp 
b/test/test_apps/pipeline-binaries/app.cpp index 499eb58e36..a1a784cbce 100644 --- a/test/test_apps/pipeline-binaries/app.cpp +++ b/test/test_apps/pipeline-binaries/app.cpp @@ -49,8 +49,8 @@ class App : public gfxrecon::test::TestAppBase VkPipelineLayout pipeline_layout; VkPipeline graphics_pipeline; - void configure_instance_builder(test::InstanceBuilder& instance_builder); - void configure_physical_device_selector(test::PhysicalDeviceSelector& phys_device_selector); + void configure_instance_builder(test::InstanceBuilder& instance_builder) override; + void configure_physical_device_selector(test::PhysicalDeviceSelector& phys_device_selector) override; void configure_device_builder(test::DeviceBuilder& device_builder, test::PhysicalDevice const& physical_device) override; From 98bde8e9bf6c481c8acf74044e9d21f4c8c6105b Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Fri, 8 Nov 2024 08:47:29 -0500 Subject: [PATCH 41/70] Fix format issues in shader objects test app --- test/test_apps/shader-objects/app.cpp | 48 +++++++++++++-------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/test/test_apps/shader-objects/app.cpp b/test/test_apps/shader-objects/app.cpp index 31949ab839..cbbdbc40d8 100644 --- a/test/test_apps/shader-objects/app.cpp +++ b/test/test_apps/shader-objects/app.cpp @@ -49,7 +49,7 @@ class App : public gfxrecon::test::TestAppBase VkCommandPool command_pool; VkCommandBuffer command_buffers[MAX_FRAMES_IN_FLIGHT]; - VkShaderEXT shaders[5]; + VkShaderEXT shaders[5]; size_t current_frame = 0; @@ -301,35 +301,35 @@ bool App::frame(const int frame_num) init.disp.cmdBindShadersEXT(command_buffer, 5u, stages, shaders); VkClearValue clear_value; - clear_value.color.float32[0] = 0.0f; - clear_value.color.float32[1] = 0.0f; - clear_value.color.float32[2] = 0.4f; - clear_value.color.float32[3] = 1.0f; + clear_value.color.float32[0] = 0.0f; + clear_value.color.float32[1] = 0.0f; + clear_value.color.float32[2] = 0.4f; + clear_value.color.float32[3] = 1.0f; VkRenderingAttachmentInfo color_attachment; - color_attachment.sType = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO; - color_attachment.pNext = nullptr; - color_attachment.imageView = init.swapchain_image_views[image_index]; - color_attachment.imageLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; - color_attachment.resolveMode = VK_RESOLVE_MODE_NONE; - color_attachment.resolveImageView = VK_NULL_HANDLE; + color_attachment.sType = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO; + color_attachment.pNext = nullptr; + color_attachment.imageView = init.swapchain_image_views[image_index]; + color_attachment.imageLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; + color_attachment.resolveMode = VK_RESOLVE_MODE_NONE; + color_attachment.resolveImageView = VK_NULL_HANDLE; color_attachment.resolveImageLayout = VK_IMAGE_LAYOUT_UNDEFINED; - color_attachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; - color_attachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE; - color_attachment.clearValue = clear_value; + color_attachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; + color_attachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE; + color_attachment.clearValue = clear_value; VkRenderingInfo rendering_info; - rendering_info.sType = VK_STRUCTURE_TYPE_RENDERING_INFO; - rendering_info.pNext = nullptr; - rendering_info.flags = 0u; - rendering_info.renderArea.offset = { 0, 0 }; - rendering_info.renderArea.extent = init.swapchain.extent; - rendering_info.layerCount = 1u; - rendering_info.viewMask = 0x0; + rendering_info.sType = VK_STRUCTURE_TYPE_RENDERING_INFO; + 
rendering_info.pNext = nullptr; + rendering_info.flags = 0u; + rendering_info.renderArea.offset = { 0, 0 }; + rendering_info.renderArea.extent = init.swapchain.extent; + rendering_info.layerCount = 1u; + rendering_info.viewMask = 0x0; rendering_info.colorAttachmentCount = 1u; - rendering_info.pColorAttachments = &color_attachment; - rendering_info.pDepthAttachment = nullptr; - rendering_info.pStencilAttachment = nullptr; + rendering_info.pColorAttachments = &color_attachment; + rendering_info.pDepthAttachment = nullptr; + rendering_info.pStencilAttachment = nullptr; init.disp.cmdBeginRenderingKHR(command_buffer, &rendering_info); init.disp.cmdDraw(command_buffer, 4u, 1u, 0u, 0u); From 92e13267a5f2ff516c3f9090160e80ab50ff2f8f Mon Sep 17 00:00:00 2001 From: beau-lunarg Date: Fri, 8 Nov 2024 10:00:33 -0500 Subject: [PATCH 42/70] Add missing overrides in host image copy and shader objects test apps --- test/test_apps/host-image-copy/app.cpp | 2 +- test/test_apps/shader-objects/app.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/test/test_apps/host-image-copy/app.cpp b/test/test_apps/host-image-copy/app.cpp index 46c44d4769..bf109a12fb 100644 --- a/test/test_apps/host-image-copy/app.cpp +++ b/test/test_apps/host-image-copy/app.cpp @@ -63,7 +63,7 @@ class App : public gfxrecon::test::TestAppBase VkCommandBuffer command_buffer; VkFence fence; - void configure_physical_device_selector(test::PhysicalDeviceSelector& phys_device_selector); + void configure_physical_device_selector(test::PhysicalDeviceSelector& phys_device_selector) override; void configure_device_builder(test::DeviceBuilder& device_builder, test::PhysicalDevice const& physical_device) override; diff --git a/test/test_apps/shader-objects/app.cpp b/test/test_apps/shader-objects/app.cpp index cbbdbc40d8..ca23a6f867 100644 --- a/test/test_apps/shader-objects/app.cpp +++ b/test/test_apps/shader-objects/app.cpp @@ -59,7 +59,7 @@ class App : public gfxrecon::test::TestAppBase VkPhysicalDeviceShaderObjectFeaturesEXT shader_object_features; VkPhysicalDeviceFeatures2 features2; - void configure_physical_device_selector(test::PhysicalDeviceSelector& phys_device_selector); + void configure_physical_device_selector(test::PhysicalDeviceSelector& phys_device_selector) override; void configure_device_builder(test::DeviceBuilder& device_builder, test::PhysicalDevice const& physical_device) override; From 7cba54a88d38411ac4143b4cf6f69faa6ea549c7 Mon Sep 17 00:00:00 2001 From: David Pinedo Date: Fri, 25 Oct 2024 13:33:13 -0600 Subject: [PATCH 43/70] dump resources: use scaled extent when a scaling up images --- framework/graphics/vulkan_resources_util.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/graphics/vulkan_resources_util.cpp b/framework/graphics/vulkan_resources_util.cpp index 0269588b20..c96c9f6705 100644 --- a/framework/graphics/vulkan_resources_util.cpp +++ b/framework/graphics/vulkan_resources_util.cpp @@ -2195,7 +2195,7 @@ VkResult VulkanResourcesUtil::BlitImage(VkImage image, create_info.flags = 0; create_info.imageType = type; create_info.format = dst_format; - create_info.extent = extent; + create_info.extent = (scale > 1.0f) ? 
scaled_extent : extent;
     create_info.mipLevels     = mip_levels;
     create_info.arrayLayers   = array_layers;
     create_info.samples       = VK_SAMPLE_COUNT_1_BIT;

From c606062d750df534e8056e0b02d5bc572394a370 Mon Sep 17 00:00:00 2001
From: Mark Young
Date: Wed, 23 Oct 2024 13:32:50 -0400
Subject: [PATCH 44/70] dx12 codegen: Break dependence upon Vulkan/Khronos

Remove the hard-coded dependency of the DX12 GFXR codegen on our Vulkan
code generation and the Khronos registry editing functionality (which the
DX side really doesn't use). This includes removing the dependency on the
"base_generators" directory as well.

NOTE: This did require copying some functionality (such as write) from the
Khronos source to maintain consistency (so I added the Khronos copyright
notice to the dx12_generators/base_generator.py file).

The reason behind this is that big changes are coming to the Vulkan side
in preparation for supporting other Khronos APIs, and those changes could
break DX12.
---
 .../base_decoder_body_generator.py            |   43 +-
 .../base_struct_decoders_body_generator.py    |    8 +-
 ...se_struct_handle_mappers_body_generator.py |  306 ++---
 ..._struct_handle_mappers_header_generator.py |  124 +-
 .../dx12_add_entries_header_generator.py      |    3 +-
 .../dx12_api_call_encoders_body_generator.py  |    2 +-
 ...dx12_api_call_encoders_header_generator.py |    3 +-
 .../dx12_base_decoder_body_generator.py       |  254 ++++
 .../dx12_generators/dx12_base_generator.py    | 1038 ++++++++++++++++-
 ...x12_base_replay_consumer_body_generator.py |   62 +
 ...x12_base_struct_decoders_body_generator.py |  236 ++++
 ...2_base_struct_decoders_header_generator.py |  113 ++
 ...dx12_call_id_to_string_header_generator.py |    7 +-
 .../dx12_command_list_util_body_generator.py  |    2 +-
 ...dx12_command_list_util_header_generator.py |    3 +-
 .../dx12_consumer_header_generator.py         |    7 +-
 .../dx12_decoder_body_generator.py            |   26 +-
 .../dx12_decoder_header_generator.py          |   16 +-
 .../dx12_enum_to_json_header_generator.py     |    3 +-
 .../dx12_enum_to_string_body_generator.py     |    3 +-
 .../dx12_enum_to_string_header_generator.py   |    3 +-
 .../dx12_json_consumer_body_generator.py      |    3 +-
 .../dx12_json_consumer_header_generator.py    |    3 +-
 .../dx12_replay_consumer_body_generator.py    |    9 +-
 .../dx12_replay_consumer_header_generator.py  |    3 +-
 .../dx12_state_table_header_generator.py      |    3 +-
 .../dx12_struct_decoders_body_generator.py    |   13 +-
 .../dx12_struct_decoders_forward_generator.py |   11 +-
 .../dx12_struct_decoders_header_generator.py  |   13 +-
 ..._struct_decoders_to_json_body_generator.py |    3 +-
 ...truct_decoders_to_json_header_generator.py |    3 +-
 ...12_struct_object_mappers_body_generator.py |  331 +++++-
 ..._struct_object_mappers_header_generator.py |  125 +-
 .../dx12_struct_unwrappers_body_generator.py  |    3 +-
 ...dx12_struct_unwrappers_header_generator.py |    3 +-
 .../dx12_struct_wrapper_body_generator.py     |    3 +-
 .../dx12_struct_wrapper_header_generator.py   |    3 +-
 .../dx12_wrapper_body_generator.py            |    3 +-
 .../dx12_wrapper_creators_body_generator.py   |    3 +-
 .../dx12_wrapper_creators_header_generator.py |    3 +-
 .../dx12_wrapper_header_generator.py          |    3 +-
 .../generated/dx12_generators/gencode.py      |    3 +-
 framework/generated/generate_dx12.py          |    5 -
 .../vulkan_generators/base_generator.py       |   54 +-
 .../vulkan_cpp_consumer_body_generator.py     |    2 -
 45 files changed, 2321 insertions(+), 549 deletions(-)
 create mode 100644 framework/generated/dx12_generators/dx12_base_decoder_body_generator.py
 create mode 100644 framework/generated/dx12_generators/dx12_base_replay_consumer_body_generator.py
 create mode 100644 
framework/generated/dx12_generators/dx12_base_struct_decoders_body_generator.py create mode 100644 framework/generated/dx12_generators/dx12_base_struct_decoders_header_generator.py diff --git a/framework/generated/base_generators/base_decoder_body_generator.py b/framework/generated/base_generators/base_decoder_body_generator.py index d6e6b0ddb7..8731d3e29a 100644 --- a/framework/generated/base_generators/base_decoder_body_generator.py +++ b/framework/generated/base_generators/base_decoder_body_generator.py @@ -75,7 +75,7 @@ def gen_child_var_name(self, base_type): new_type_name = new_type_name.lower() return re.sub('xr_', '', new_type_name) - def make_cmd_body(self, return_type, name, values, dx12_method=False): + def make_cmd_body(self, return_type, name, values): """Generate C++ code for the decoder method body.""" preamble = '' main_body = '' @@ -120,27 +120,9 @@ def make_cmd_body(self, return_type, name, values, dx12_method=False): else: arg_names.append(value.name) - # Vulkan return is very simple. Value is only for Dx12 Method. - dx12_return_value = None - dx12_return_decode_type = None + # Vulkan return is very simple. if return_type and return_type != 'void': - if dx12_method: - dx12_return_value = self.get_return_value_info( - return_type, name - ) - dx12_return_decode_type = self.make_decoded_param_type( - dx12_return_value - ) - main_body += ' {} return_value;\n'.format( - dx12_return_decode_type - ) - - if dx12_return_decode_type == 'Decoded_{}'.format(return_type): - main_body += ' {} value_returned;\n'.format(return_type) - main_body += ' return_value.decoded_value = &value_returned;\n' - - else: - main_body += ' {} return_value;\n'.format(return_type) + main_body += ' {} return_value;\n'.format(return_type) # Blank line after declarations. if values or return_type: @@ -159,14 +141,9 @@ def make_cmd_body(self, return_type, name, values, dx12_method=False): self, value, preamble, main_body, epilogue ) if return_type and return_type != 'void': - if dx12_method: - preamble, main_body, epilogue = BaseDecoderBodyGenerator.make_decode_invocation( - self, dx12_return_value, preamble, main_body, epilogue - ) - else: - preamble, main_body, epilogue = BaseDecoderBodyGenerator.make_decode_invocation( - self, ValueInfo('return_value', return_type, return_type), preamble, main_body, epilogue - ) + preamble, main_body, epilogue = BaseDecoderBodyGenerator.make_decode_invocation( + self, ValueInfo('return_value', return_type, return_type), preamble, main_body, epilogue + ) # Blank line after Decode() method invocations. 
if values or return_type: @@ -175,13 +152,7 @@ def make_cmd_body(self, return_type, name, values, dx12_method=False): # Make the argument list for the API call arglist = ', '.join([arg_name for arg_name in arg_names]) if return_type and return_type != 'void': - if dx12_method and dx12_return_decode_type.find('Decoder') != -1: - arglist = ', '.join(['&return_value', arglist]) - else: - arglist = ', '.join(['return_value', arglist]) - - if dx12_method: - arglist = 'object_id, ' + arglist + arglist = ', '.join(['return_value', arglist]) if arglist[-2:] == ', ': arglist = arglist[:-2] diff --git a/framework/generated/base_generators/base_struct_decoders_body_generator.py b/framework/generated/base_generators/base_struct_decoders_body_generator.py index 9a0721edb2..fa550c5a50 100644 --- a/framework/generated/base_generators/base_struct_decoders_body_generator.py +++ b/framework/generated/base_generators/base_struct_decoders_body_generator.py @@ -185,15 +185,9 @@ def make_decode_invocation(self, name, value, preamble, main_body, epilogue): access_op = '->' if is_static_array: - array_dimension = '' - # dx12 treats 2d array as 1d array. EX: [8][2] -> [16], so dx12's 2d array needs *. - # But vk keeps 2d array. - if self.is_dx12_class() and value.array_dimension and value.array_dimension > 0: - array_dimension = '*' # The pointer decoder will write directly to the struct member's memory. - main_body += ' wrapper->{name}{}SetExternalMemory({}value->{name}, {arraylen});\n'.format( + main_body += ' wrapper->{name}{}SetExternalMemory(value->{name}, {arraylen});\n'.format( access_op, - array_dimension, name=value.name, arraylen=value.array_capacity ) diff --git a/framework/generated/base_generators/base_struct_handle_mappers_body_generator.py b/framework/generated/base_generators/base_struct_handle_mappers_body_generator.py index 70032fcd61..c2a1d9fdb5 100644 --- a/framework/generated/base_generators/base_struct_handle_mappers_body_generator.py +++ b/framework/generated/base_generators/base_struct_handle_mappers_body_generator.py @@ -30,74 +30,74 @@ class BaseStructHandleMappersBodyGenerator(): def endFile(self): platform_type = self.get_api_prefix() - if not self.is_dx12_class(): - if platform_type == 'Vulkan': - # Generate the pNext handle mapping code. - self.newline() - write( - 'void MapPNextStructHandles(const void* value, void* wrapper, const CommonObjectInfoTable& object_info_table)', - file=self.outFile - ) - write('{', file=self.outFile) - write( - ' if ((value != nullptr) && (wrapper != nullptr))', - file=self.outFile - ) - write(' {', file=self.outFile) - write( - ' const VkBaseInStructure* base = reinterpret_cast(value);', - file=self.outFile - ) - write('', file=self.outFile) - write(' switch (base->sType)', file=self.outFile) - write(' {', file=self.outFile) - write(' default:', file=self.outFile) - write( - ' // TODO: Report or raise fatal error for unrecongized sType?', - file=self.outFile - ) - else: - # Generate the next handle mapping code. - self.newline() - write( - 'void MapNextStructHandles(const void* value, void* wrapper, const CommonObjectInfoTable& object_info_table)', - file=self.outFile - ) - write('{', file=self.outFile) - write( - ' if ((value != nullptr) && (wrapper != nullptr))', - file=self.outFile - ) - write(' {', file=self.outFile) + + if platform_type == 'Vulkan': + # Generate the pNext handle mapping code. 
+ self.newline() + write( + 'void MapPNextStructHandles(const void* value, void* wrapper, const CommonObjectInfoTable& object_info_table)', + file=self.outFile + ) + write('{', file=self.outFile) + write( + ' if ((value != nullptr) && (wrapper != nullptr))', + file=self.outFile + ) + write(' {', file=self.outFile) + write( + ' const VkBaseInStructure* base = reinterpret_cast(value);', + file=self.outFile + ) + write('', file=self.outFile) + write(' switch (base->sType)', file=self.outFile) + write(' {', file=self.outFile) + write(' default:', file=self.outFile) + write( + ' // TODO: Report or raise fatal error for unrecongized sType?', + file=self.outFile + ) + else: + # Generate the next handle mapping code. + self.newline() + write( + 'void MapNextStructHandles(const void* value, void* wrapper, const CommonObjectInfoTable& object_info_table)', + file=self.outFile + ) + write('{', file=self.outFile) + write( + ' if ((value != nullptr) && (wrapper != nullptr))', + file=self.outFile + ) + write(' {', file=self.outFile) + write( + ' const XrBaseInStructure* base = reinterpret_cast(value);', + file=self.outFile + ) + write('', file=self.outFile) + write(' switch (base->type)', file=self.outFile) + write(' {', file=self.outFile) + write(' default:', file=self.outFile) + write( + ' // TODO: Report or raise fatal error for unrecongized type?', + file=self.outFile + ) + + write(' break;', file=self.outFile) + for base_type in self.pnext_structs: + if base_type in self.structs_with_handles: write( - ' const XrBaseInStructure* base = reinterpret_cast(value);', + ' case {}:'.format(self.pnext_structs[base_type]), file=self.outFile ) - write('', file=self.outFile) - write(' switch (base->type)', file=self.outFile) - write(' {', file=self.outFile) - write(' default:', file=self.outFile) write( - ' // TODO: Report or raise fatal error for unrecongized type?', + ' MapStructHandles(reinterpret_cast(wrapper), object_info_table);' + .format(base_type), file=self.outFile ) - - write(' break;', file=self.outFile) - for base_type in self.pnext_structs: - if base_type in self.structs_with_handles: - write( - ' case {}:'.format(self.pnext_structs[base_type]), - file=self.outFile - ) - write( - ' MapStructHandles(reinterpret_cast(wrapper), object_info_table);' - .format(base_type), - file=self.outFile - ) - write(' break;', file=self.outFile) - write(' }', file=self.outFile) - write(' }', file=self.outFile) - write('}', file=self.outFile) + write(' break;', file=self.outFile) + write(' }', file=self.outFile) + write(' }', file=self.outFile) + write('}', file=self.outFile) # Generate handle adding functions for output structs with handles for struct in self.output_structs_with_handles: @@ -126,14 +126,6 @@ def endFile(self): def generate_feature(self): """Performs C++ code generation for the feature.""" - object_table_prefix = 'Common' - map_types = 'Handles' - map_table = '' - if self.is_dx12_class(): - object_table_prefix = 'Dx12' - map_types = 'Objects' - map_table = ', const graphics::Dx12GpuVaMap& gpu_va_map' - for struct in self.get_filtered_struct_names(): if ( (struct in self.structs_with_handles) @@ -170,8 +162,8 @@ def generate_feature(self): break body = '\n' - body += 'void MapStruct{}(Decoded_{}* wrapper, const {}ObjectInfoTable& object_info_table{})\n'.format( - map_types, struct, object_table_prefix, map_table + body += 'void MapStructHandles(Decoded_{}* wrapper, const CommonObjectInfoTable& object_info_table)\n'.format( + struct ) body += '{\n' @@ -198,26 +190,13 @@ def 
make_struct_handle_mappings( ): """Generating expressions for mapping struct handles read from the capture file to handles created at replay.""" prefix_from_type = self.get_prefix_from_type(name) - object_table_prefix = prefix_from_type - map_types = 'Objects' - map_type = 'Object' - base_type = 'object' - object_info_table_get = '' - given_object = ', gpu_va_map' - is_dx12_class = self.is_dx12_class() - if not is_dx12_class: - object_table_prefix = 'Common' - map_types = 'Handles' - map_type = 'Handle' - base_type = 'handle' - given_object = '' body = '' for member in handle_members: body += '\n' map_func = self.MAP_STRUCT_TYPE.get(member.base_type) - if ('pNext' == member.name or 'next' == member.name) and (not is_dx12_class): + if ('pNext' == member.name or 'next' == member.name): func_id = 'PNext' if 'next' == member.name: func_id = 'Next' @@ -248,57 +227,43 @@ def make_struct_handle_mappings( elif self.is_struct(member.base_type): # This is a struct that includes handles. if member.is_array: - body += ' MapStructArray{}(wrapper->{name}->GetMetaStructPointer(), wrapper->{name}->GetLength(), object_info_table{});\n'.format( - map_types, + body += ' MapStructArrayHandles(wrapper->{name}->GetMetaStructPointer(), wrapper->{name}->GetLength(), object_info_table);\n'.format( member.base_type, - given_object, name=member.name ) elif member.is_pointer: - body += ' MapStructArray{}(wrapper->{}->GetMetaStructPointer(), 1, object_info_table{});\n'.format( - map_types, member.base_type, member.name, given_object + body += ' MapStructArrayHandles(wrapper->{}->GetMetaStructPointer(), 1, object_info_table);\n'.format( + member.base_type, member.name ) else: - body += ' MapStruct{}(wrapper->{}, object_info_table{});\n'.format( - map_types, member.name, given_object + body += ' MapStructHandles(wrapper->{}, object_info_table);\n'.format( + member.name ) else: type = member.base_type - if not is_dx12_class: - prefix_from_type = self.get_prefix_from_type(member.base_type) - func_id = member.base_type + 'Info' - type = prefix_from_type + member.base_type[2:] + 'Info' - object_info_table_get = ', &{}ObjectInfoTable::Get{}'.format( - object_table_prefix, func_id - ) + prefix_from_type = self.get_prefix_from_type(member.base_type) + func_id = member.base_type + 'Info' + type = prefix_from_type + member.base_type[2:] + 'Info' + object_info_table_get = ', &CommonObjectInfoTable::Get{}'.format( + func_id + ) # If it is an array or pointer, map with the utility function. 
- if ( - member.is_array or ( - (not is_dx12_class and member.is_pointer) or - (is_dx12_class and member.pointer_count > 1) - ) - ): + if (member.is_array or member.is_pointer): if member.is_dynamic or member.is_pointer: - body += ' value->{name} = {}_mapping::Map{}Array<{type}>(&wrapper->{name}, object_info_table{});\n'.format( - base_type, - map_type, + body += ' value->{name} = handle_mapping::MapHandleArray<{type}>(&wrapper->{name}, object_info_table{});\n'.format( object_info_table_get, type=type, name=member.name ) else: - body += ' {}_mapping::Map{}Array<{type}>(&wrapper->{name}, object_info_table{});\n'.format( - base_type, - map_type, + body += ' handle_mapping::MapHandleArray<{type}>(&wrapper->{name}, object_info_table{});\n'.format( object_info_table_get, type=type, name=member.name ) else: - body += ' value->{name} = {}_mapping::Map{}<{type}>(wrapper->{name}, object_info_table{});\n'.format( - base_type, - map_type, + body += ' value->{name} = handle_mapping::MapHandle<{type}>(wrapper->{name}, object_info_table{});\n'.format( object_info_table_get, type=type, name=member.name @@ -306,9 +271,7 @@ def make_struct_handle_mappings( for member in generic_handle_members: body += '\n' - body += ' value->{name} = {}_mapping::Map{}(wrapper->{name}, value->{}, object_info_table);\n'.format( - base_type, - map_type, + body += ' value->{name} = handle_mapping::MapHandle(wrapper->{name}, value->{}, object_info_table);\n'.format( generic_handle_members[member], name=member ) @@ -317,24 +280,10 @@ def make_struct_handle_mappings( def make_struct_handle_additions(self, name, members): """Generating expressions for adding mappings for handles created at replay that are embedded in structs.""" - object_info_table_add = '' platform_type = self.get_api_prefix() - object_table_prefix = 'Common' - map_types = 'Handles' - map_type = 'Handle' - base_type = 'handle' - map_table = '' - is_dx12_class = self.is_dx12_class() - if is_dx12_class: - platform_type = 'Dx12' - object_table_prefix = platform_type - map_types = 'Objects' - map_type = 'Object' - base_type = 'object' - map_table = ', graphics::Dx12GpuVaMap* gpu_va_map' - - body = 'void AddStruct{}(format::HandleId parent_id, const Decoded_{name}* id_wrapper, const {name}* handle_struct, {}ObjectInfoTable* object_info_table{})\n'.format( - map_types, object_table_prefix, map_table, name=name + + body = 'void AddStructHandles(format::HandleId parent_id, const Decoded_{name}* id_wrapper, const {name}* handle_struct, CommonObjectInfoTable* object_info_table)\n'.format( + name=name ) body += '{\n' body += ' if (id_wrapper != nullptr)\n' @@ -342,7 +291,7 @@ def make_struct_handle_additions(self, name, members): for member in members: - if ('pNext' == member.name or 'next' == member.name) and (not is_dx12_class): + if ('pNext' == member.name or 'next' == member.name): func_id = 'PNext' if 'next' == member.name: func_id = 'Next' @@ -353,59 +302,46 @@ def make_struct_handle_additions(self, name, members): elif self.is_struct(member.base_type): # This is a struct that includes handles. 
if member.is_array: - body += ' AddStructArray{}(parent_id, id_wrapper->{name}->GetMetaStructPointer(), id_wrapper->{name}->GetLength(), handle_struct->{name}, static_cast(handle_struct->{length}), object_info_table);\n'.format( - map_types, + body += ' AddStructArrayHandles(parent_id, id_wrapper->{name}->GetMetaStructPointer(), id_wrapper->{name}->GetLength(), handle_struct->{name}, static_cast(handle_struct->{length}), object_info_table);\n'.format( member.base_type, name=member.name, length=member.array_length ) elif member.is_pointer: - body += ' AddStructArray{}(parent_id, id_wrapper->{name}->GetMetaStructPointer(), 1, handle_struct->{name}, 1, object_info_table);\n'.format( - map_types, member.base_type, name=member.name + body += ' AddStructArrayHandles(parent_id, id_wrapper->{name}->GetMetaStructPointer(), 1, handle_struct->{name}, 1, object_info_table);\n'.format( + member.base_type, name=member.name ) else: - body += ' AddStruct{}(parent_id, id_wrapper->{name}, &handle_struct->{name}, object_info_table);\n'.format( - map_types, name=member.name + body += ' AddStructHandles(parent_id, id_wrapper->{name}, &handle_struct->{name}, object_info_table);\n'.format( + name=member.name ) else: type = member.base_type - if not is_dx12_class: - func_id = member.base_type + 'Info' - type = platform_type + member.base_type[2:] + 'Info' - object_info_table_add = ', &{}ObjectInfoTable::Add{}'.format( - object_table_prefix, func_id - ) + func_id = member.base_type + 'Info' + type = platform_type + member.base_type[2:] + 'Info' + object_info_table_add = ', &CommonObjectInfoTable::Add{}'.format( + func_id + ) # If it is an array or pointer, add with the utility function. - if ( - member.is_array or ( - (not is_dx12_class and member.is_pointer) or - (is_dx12_class and member.pointer_count > 1) - ) - ): + if (member.is_array or member.is_pointer): if member.is_array: - body += ' {}_mapping::Add{}Array<{type}>(parent_id, id_wrapper->{name}.GetPointer(), id_wrapper->{name}.GetLength(), handle_struct->{name}, handle_struct->{length}, object_info_table{});\n'.format( - base_type, - map_type, + body += ' handle_mapping::AddHandleArray<{}>(parent_id, id_wrapper->{name}.GetPointer(), id_wrapper->{name}.GetLength(), handle_struct->{name}, handle_struct->{length}, object_info_table{});\n'.format( + type, object_info_table_add, - type=type, name=member.name, length=member.array_length ) else: - body += ' {}_mapping::Add{}Array<{type}>(parent_id, id_wrapper->{name}.GetPointer(), 1, handle_struct->{name}, 1, object_info_table{}});\n'.format( - base_type, - map_type, + body += ' handle_mapping::AddHandleArray<{}>(parent_id, id_wrapper->{name}.GetPointer(), 1, handle_struct->{name}, 1, object_info_table{}});\n'.format( + type, object_info_table_add, - type=type, name=member.name ) else: - body += ' {}_mapping::Add{}<{type}>(parent_id, id_wrapper->{name}, handle_struct->{name}, object_info_table{});\n'.format( - base_type, - map_type, + body += ' handle_mapping::AddHandle<{}>(parent_id, id_wrapper->{name}, handle_struct->{name}, object_info_table{});\n'.format( + type, object_info_table_add, - type=type, name=member.name ) @@ -418,11 +354,6 @@ def make_struct_handle_allocations(self, name, members): Determine if the struct only contains members that are structs that contain handles or static arrays of handles, and does not need a temporary variable referencing the struct value. 
""" - map_type = 'Object' - is_dx12_class = self.is_dx12_class() - if not is_dx12_class: - map_type = 'Handle' - needs_value_ptr = False for member in members: if self.is_handle( @@ -431,8 +362,8 @@ def make_struct_handle_allocations(self, name, members): needs_value_ptr = True break - body = 'void SetStruct{}Lengths(Decoded_{name}* wrapper)\n'.format( - map_type, name=name + body = 'void SetStructHandleLengths(Decoded_{name}* wrapper)\n'.format( + name=name ) body += '{\n' @@ -448,7 +379,7 @@ def make_struct_handle_allocations(self, name, members): body += '\n' for member in members: - if ('pNext' == member.name or 'next' == member.name) and (not is_dx12_class): + if ('pNext' == member.name or 'next' == member.name): func_id = 'PNext' if 'next' == member.name: func_id = 'Next' @@ -459,32 +390,27 @@ def make_struct_handle_allocations(self, name, members): elif self.is_struct(member.base_type): # This is a struct that includes handles. if member.is_array: - body += ' SetStructArray{}Lengths(wrapper->{name}->GetMetaStructPointer(), wrapper->{name}->GetLength());\n'.format( - map_type, member.base_type, name=member.name + body += ' SetStructArrayHandleLengths(wrapper->{name}->GetMetaStructPointer(), wrapper->{name}->GetLength());\n'.format( + member.base_type, name=member.name ) elif member.is_pointer: - body += ' SetStructArray{}Lengths(wrapper->{name}->GetMetaStructPointer(), 1);\n'.format( - map_type, member.base_type, name=member.name + body += ' SetStructArrayHandleLengths(wrapper->{name}->GetMetaStructPointer(), 1);\n'.format( + member.base_type, name=member.name ) else: - body += ' SetStruct{}Lengths(wrapper->{name});\n'.format( - map_type, name=member.name + body += ' SetStructHandleLengths(wrapper->{name});\n'.format( + name=member.name ) else: # If it is an array or pointer, add with the utility function. 
- if ( - member.is_array or ( - (not is_dx12_class and member.is_pointer) or - (is_dx12_class and member.pointer_count > 1) - ) - ): + if (member.is_array or member.is_pointer): if member.is_array: - body += ' wrapper->{name}.Set{}Length(wrapper->{name}.GetLength());\n'.format( - map_type, name=member.name + body += ' wrapper->{name}.SetHandleLength(wrapper->{name}.GetLength());\n'.format( + name=member.name ) else: - body += ' wrapper->{}.Set{}Length(1);\n'.format( - map_type, member.name + body += ' wrapper->{}.SetHandleLength(1);\n'.format( + member.name ) if member.is_dynamic or member.is_pointer: diff --git a/framework/generated/base_generators/base_struct_handle_mappers_header_generator.py b/framework/generated/base_generators/base_struct_handle_mappers_header_generator.py index 0e4ea172b5..d36628c867 100644 --- a/framework/generated/base_generators/base_struct_handle_mappers_header_generator.py +++ b/framework/generated/base_generators/base_struct_handle_mappers_header_generator.py @@ -30,143 +30,45 @@ class BaseStructHandleMappersHeaderGenerator(): def endFile(self): platform_type = self.get_api_prefix() - object_type = 'Common' - map_types = 'Handles' - map_type = 'Handle' - map_table = '' - map_object = '' - - is_dx12_class = self.is_dx12_class() - if is_dx12_class: - object_type = platform_type - map_types = 'Objects' - map_type = 'Object' - map_table = ', const graphics::Dx12GpuVaMap& gpu_va_map' - map_object = ', gpu_va_map' - - if not is_dx12_class: - self.newline() - if platform_type == 'Vulkan': - write( - f'void MapPNextStructHandles(const void* value, void* wrapper, const CommonObjectInfoTable& object_info_table);', - file=self.outFile - ) - else: - write( - f'void MapNextStructHandles(const void* value, void* wrapper, const CommonObjectInfoTable& object_info_table);', - file=self.outFile - ) self.newline() - - if is_dx12_class: - write('template ', file=self.outFile) + if platform_type == 'Vulkan': write( - 'void MapStructArray{}(T* structs, size_t len, const {}ObjectInfoTable& object_info_table{})' - .format(map_types, object_type, map_table), + f'void MapPNextStructHandles(const void* value, void* wrapper, const CommonObjectInfoTable& object_info_table);', file=self.outFile ) - write('{', file=self.outFile) - write(' if (structs != nullptr)', file=self.outFile) - write(' {', file=self.outFile) - write(' for (size_t i = 0; i < len; ++i)', file=self.outFile) - write(' {', file=self.outFile) + else: write( - ' MapStruct{}(&structs[i], object_info_table{});'. 
- format(map_types, map_object), + f'void MapNextStructHandles(const void* value, void* wrapper, const CommonObjectInfoTable& object_info_table);', file=self.outFile ) - write(' }', file=self.outFile) - write(' }', file=self.outFile) - write('}', file=self.outFile) - self.newline() - for struct in self.output_structs_with_handles: - write( - 'void AddStruct{}(format::HandleId parent_id, const Decoded_{type}* id_wrapper, const {type}* handle_struct, {}ObjectInfoTable* object_info_table{});' - .format(map_types, object_type, map_table, type=struct), - file=self.outFile - ) - self.newline() + self.newline() - if is_dx12_class: - write('template ', file=self.outFile) - write( - 'void AddStructArray{}(format::HandleId parent_id, const T* id_wrappers, size_t id_len, const typename T::struct_type* handle_structs, size_t handle_len, {}ObjectInfoTable* object_info_table{})' - .format(map_types, object_type, map_table), - file=self.outFile - ) - write('{', file=self.outFile) - write( - ' if (id_wrappers != nullptr && handle_structs != nullptr)', - file=self.outFile - ) - write(' {', file=self.outFile) - write( - ' // TODO: Improved handling of array size mismatch.', - file=self.outFile - ) - write( - ' size_t len = std::min(id_len, handle_len);', - file=self.outFile - ) - write(' for (size_t i = 0; i < len; ++i)', file=self.outFile) - write(' {', file=self.outFile) + for struct in self.output_structs_with_handles: write( - ' AddStruct{}(parent_id, &id_wrappers[i], &handle_structs[i], object_info_table);' - .format(map_types), + 'void AddStructHandles(format::HandleId parent_id, const Decoded_{type}* id_wrapper, const {type}* handle_struct, CommonObjectInfoTable* object_info_table);' + .format(type=struct), file=self.outFile ) - write(' }', file=self.outFile) - write(' }', file=self.outFile) - write('}', file=self.outFile) self.newline() for struct in self.output_structs_with_handles: if struct in self.structs_with_handle_ptrs: write( - 'void SetStruct{map_type}Lengths(Decoded_{type}* wrapper);' - .format(map_type=map_type, type=struct), + 'void SetStructHandleLengths(Decoded_{type}* wrapper);' + .format(type=struct), file=self.outFile ) self.newline() - if is_dx12_class: - write('template ', file=self.outFile) - write( - 'void SetStructArray{}Lengths(T* wrappers, size_t len)'. 
- format(map_type), - file=self.outFile - ) - write('{', file=self.outFile) - write(' if (wrappers != nullptr)', file=self.outFile) - write(' {', file=self.outFile) - write(' for (size_t i = 0; i < len; ++i)', file=self.outFile) - write(' {', file=self.outFile) - write( - ' SetStruct{}Lengths(&wrappers[i]);'.format(map_type), - file=self.outFile - ) - write(' }', file=self.outFile) - write(' }', file=self.outFile) - write('}', file=self.outFile) - self.newline() - else: - write('#include "decode/common_struct_handle_mappers.h"', file=self.outFile) + write('#include "decode/common_struct_handle_mappers.h"', file=self.outFile) write('GFXRECON_END_NAMESPACE(decode)', file=self.outFile) write('GFXRECON_END_NAMESPACE(gfxrecon)', file=self.outFile) def generate_feature(self): """Performs C++ code generation for the feature.""" - object_table = 'Common' - map_type = 'Handles' - map_table = '' - if self.is_dx12_class(): - object_table = 'Dx12' - map_type = 'Objects' - map_table = ', const graphics::Dx12GpuVaMap& gpu_va_map' - for struct in self.get_filtered_struct_names(): if ( (struct in self.structs_with_handles) @@ -174,7 +76,7 @@ def generate_feature(self): or (struct in self.structs_with_map_data) ) and (struct not in self.STRUCT_MAPPERS_BLACKLIST): body = '\n' - body += 'void MapStruct{}(Decoded_{}* wrapper, const {}ObjectInfoTable& object_info_table{});'.format( - map_type, struct, object_table, map_table + body += 'void MapStructHandles(Decoded_{}* wrapper, const CommonObjectInfoTable& object_info_table);'.format( + struct ) write(body, file=self.outFile) diff --git a/framework/generated/dx12_generators/dx12_add_entries_header_generator.py b/framework/generated/dx12_generators/dx12_add_entries_header_generator.py index 67f44261ec..25edc29f9e 100644 --- a/framework/generated/dx12_generators/dx12_add_entries_header_generator.py +++ b/framework/generated/dx12_generators/dx12_add_entries_header_generator.py @@ -21,8 +21,7 @@ # IN THE SOFTWARE. import sys -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator +from dx12_base_generator import Dx12BaseGenerator, write # Generates declarations for functions to add entries for DX12 state tracker diff --git a/framework/generated/dx12_generators/dx12_api_call_encoders_body_generator.py b/framework/generated/dx12_generators/dx12_api_call_encoders_body_generator.py index aa12755a01..be8b486ad0 100644 --- a/framework/generated/dx12_generators/dx12_api_call_encoders_body_generator.py +++ b/framework/generated/dx12_generators/dx12_api_call_encoders_body_generator.py @@ -22,7 +22,7 @@ # IN THE SOFTWARE. import sys -from base_generator import write +from dx12_base_generator import write from dx12_api_call_encoders_header_generator import Dx12ApiCallEncodersHeaderGenerator diff --git a/framework/generated/dx12_generators/dx12_api_call_encoders_header_generator.py b/framework/generated/dx12_generators/dx12_api_call_encoders_header_generator.py index d3f093d267..438a1fd930 100644 --- a/framework/generated/dx12_generators/dx12_api_call_encoders_header_generator.py +++ b/framework/generated/dx12_generators/dx12_api_call_encoders_header_generator.py @@ -21,8 +21,7 @@ # IN THE SOFTWARE. 
import sys -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator +from dx12_base_generator import Dx12BaseGenerator, write class Dx12ApiCallEncodersHeaderGenerator(Dx12BaseGenerator): diff --git a/framework/generated/dx12_generators/dx12_base_decoder_body_generator.py b/framework/generated/dx12_generators/dx12_base_decoder_body_generator.py new file mode 100644 index 0000000000..de7e747a77 --- /dev/null +++ b/framework/generated/dx12_generators/dx12_base_decoder_body_generator.py @@ -0,0 +1,254 @@ +#!/usr/bin/python3 -i +# +# Copyright (c) 2018-2020 Valve Corporation +# Copyright (c) 2018-2020 LunarG, Inc. +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. + +from dx12_base_generator import ValueInfo, write + + +class Dx12BaseDecoderBodyGenerator(): + """Base class for generating decoder body code.""" + + def generate_feature(self): + """Performs C++ code generation for the feature.""" + platform_type = self.get_api_prefix() + + first = True + for cmd in self.get_filtered_cmd_names(): + self.cmd_names.append(cmd) + + info = self.feature_cmd_params[cmd] + return_type = info[0] + values = info[2] + + cmddef = '' if first else '\n' + cmddef += 'size_t {}Decoder::Decode_{}(const ApiCallInfo& call_info, const uint8_t* parameter_buffer, size_t buffer_size)\n'.format( + platform_type, cmd + ) + cmddef += '{\n' + cmddef += ' size_t bytes_read = 0;\n' + cmddef += '\n' + cmddef += self.make_cmd_body(return_type, cmd, values) + cmddef += '\n' + cmddef += ' return bytes_read;\n' + cmddef += '}' + + write(cmddef, file=self.outFile) + first = False + + def make_cmd_body(self, return_type, name, values, dx12_method=False): + """Generate C++ code for the decoder method body.""" + body = '' + arg_names = [] + + # Declarations for decoded types. + for value in values: + decode_type = self.make_decoded_param_type(value) + body += ' {} {};\n'.format(decode_type, value.name) + + if decode_type == 'Decoded_{}'.format(value.base_type): + body += ' {} value_{};\n'.format( + value.base_type, value.name + ) + body += ' {0}.decoded_value = &value_{0};\n'.format( + value.name + ) + + if 'Decoder' in decode_type: + arg_names.append('&{}'.format(value.name)) + else: + arg_names.append(value.name) + + # Vulkan return is very simple. Value is only for Dx12 Method. 
+ dx12_return_value = None + dx12_return_decode_type = None + if return_type and return_type != 'void': + if dx12_method: + dx12_return_value = self.get_return_value_info( + return_type, name + ) + dx12_return_decode_type = self.make_decoded_param_type( + dx12_return_value + ) + body += ' {} return_value;\n'.format( + dx12_return_decode_type + ) + + if dx12_return_decode_type == 'Decoded_{}'.format(return_type): + body += ' {} value_returned;\n'.format(return_type) + body += ' return_value.decoded_value = &value_returned;\n' + + else: + body += ' {} return_value;\n'.format(return_type) + + # Blank line after declarations. + if values or return_type: + body += '\n' + + # Decode() method calls for pointer decoder wrappers. + for value in values: + body += Dx12BaseDecoderBodyGenerator.make_decode_invocation( + self, value + ) + if return_type and return_type != 'void': + if dx12_method: + body += Dx12BaseDecoderBodyGenerator.make_decode_invocation( + self, dx12_return_value + ) + else: + body += Dx12BaseDecoderBodyGenerator.make_decode_invocation( + self, ValueInfo('return_value', return_type, return_type) + ) + + # Blank line after Decode() method invocations. + if values or return_type: + body += '\n' + + # Make the argument list for the API call + arglist = ', '.join([arg_name for arg_name in arg_names]) + if return_type and return_type != 'void': + if dx12_method and dx12_return_decode_type.find('Decoder') != -1: + arglist = ', '.join(['&return_value', arglist]) + else: + arglist = ', '.join(['return_value', arglist]) + + if dx12_method: + arglist = 'object_id, ' + arglist + + if arglist[-2:] == ', ': + arglist = arglist[:-2] + arglist = 'call_info, ' + arglist + + body += ' for (auto consumer : GetConsumers())\n' + body += ' {\n' + body += ' consumer->Process_{}({});\n'.format(name, arglist) + body += ' }\n' + + return body + + def make_decode_invocation(self, value): + """Generate parameter decode function/method invocation.""" + buffer_args = '(parameter_buffer + bytes_read), (buffer_size - bytes_read)' + body = '' + + is_struct = False + is_class = False + is_string = False + is_funcp = False + is_handle = False + + type_name = self.make_invocation_type_name(value.base_type) + + if self.is_struct(type_name): + is_struct = True + elif self.is_class(value): + is_class = True + elif type_name in ['String', 'WString']: + is_string = True + elif type_name == 'FunctionPtr': + is_funcp = True + elif self.is_handle(value.base_type): + is_handle = True + + # is_pointer will be False for static arrays. + if value.is_pointer or value.is_array: + if not is_class and type_name in self.EXTERNAL_OBJECT_TYPES and not value.is_array: + if value.pointer_count > 1: + # Pointer to a pointer to an unknown object type (void**), encoded as a pointer to a 64-bit integer ID. + body += ' bytes_read += {}.DecodeVoidPtr({});\n'.format( + value.name, buffer_args + ) + else: + # Pointer to an unknown object type, encoded as a 64-bit integer ID. 
+ body += ' bytes_read += ValueDecoder::DecodeAddress({}, &{});\n'.format( + buffer_args, value.name + ) + else: + if is_struct or is_string or is_handle or ( + is_class and value.pointer_count > 1 + ): + body += ' bytes_read += {}.Decode({});\n'.format( + value.name, buffer_args + ) + elif is_class and value.pointer_count == 1: + body += ' bytes_read += ValueDecoder::DecodeHandleIdValue({}, &{});\n'.format( + buffer_args, value.name + ) + elif self.has_basetype(value.base_type): + base_type = self.get_basetype(value.base_type) + body += ' bytes_read += {}.Decode{}({});\n'.format( + value.name, self.encode_types[base_type], buffer_args + ) + else: + body += ' bytes_read += {}.Decode{}({});\n'.format( + value.name, type_name, buffer_args + ) + else: + if is_struct: + body += ' bytes_read += DecodeStruct({}, &{});\n'.format( + buffer_args, value.name + ) + elif is_funcp: + body += ' bytes_read += ValueDecoder::DecodeAddress({}, &{});\n'.format( + buffer_args, value.name + ) + elif is_handle: + body += ' bytes_read += ValueDecoder::DecodeHandleIdValue({}, &{});\n'.format( + buffer_args, value.name + ) + elif self.has_basetype(type_name) : + base_type = self.get_basetype(type_name) + body += ' bytes_read += ValueDecoder::Decode{}Value({}, &{});\n'.format( + self.encode_types[base_type], buffer_args, value.name + ) + else: + body += ' bytes_read += ValueDecoder::Decode{}Value({}, &{});\n'.format( + type_name, buffer_args, value.name + ) + + return body + + def generate_decode_cases(self): + prefix = self.get_api_prefix() + """Generate the (Platform)Decoder::DecodeFunctionCall method.""" + + body = f'void {prefix}Decoder::DecodeFunctionCall(format::ApiCallId call_id,\n' + body += ' const ApiCallInfo& call_info,\n' + body += ' const uint8_t* parameter_buffer,\n' + body += ' size_t buffer_size)\n' + body += '{\n' + body += ' switch(call_id)\n' + body += ' {\n' + body += ' default:\n' + body += f' {prefix}DecoderBase::DecodeFunctionCall(call_id, call_info, parameter_buffer, buffer_size);\n' + body += ' break;\n' + write(body, file=self.outFile) + + for cmd in self.cmd_names: + cmddef = ' case format::ApiCallId::ApiCall_{}:\n'.format(cmd) + cmddef += ' Decode_{}(call_info, parameter_buffer, buffer_size);\n'.format( + cmd + ) + cmddef += ' break;' + write(cmddef, file=self.outFile) + + write(' }', file=self.outFile) + write('}\n', file=self.outFile) diff --git a/framework/generated/dx12_generators/dx12_base_generator.py b/framework/generated/dx12_generators/dx12_base_generator.py index e4a6d0d2d9..cb0cd0fbbc 100644 --- a/framework/generated/dx12_generators/dx12_base_generator.py +++ b/framework/generated/dx12_generators/dx12_base_generator.py @@ -1,6 +1,7 @@ #!/usr/bin/env python3 # -# Copyright (c) 2021 LunarG, Inc. +# Copyright (c) 2013-2024 The Khronos Group Inc. +# Copyright (c) 2021-2024 LunarG, Inc. # Copyright (c) 2023 Advanced Micro Devices, Inc. All rights reserved. # # Permission is hereby granted, free of charge, to any person obtaining a copy @@ -21,12 +22,156 @@ # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. 
-import sys -from base_generator import BaseGenerator, BaseGeneratorOptions, ValueInfo - +import json,os,re,shutil,sys,tempfile + +from collections import OrderedDict + +try: + from pathlib import Path +except ImportError: + # For limited python 2 compat as used by some Vulkan consumers + from pathlib2 import Path # type: ignore + +def write(*args, **kwargs): + file = kwargs.pop('file', sys.stdout) + end = kwargs.pop('end', '\n') + file.write(' '.join(str(arg) for arg in args)) + file.write(end) + +def make_re_string(list, default=None): + """Turn a list of strings into a regexp string matching exactly those strings. + Pulled from Khronos genvk.py + """ + if (len(list) > 0) or (default is None): + return '^(' + '|'.join(list) + ')$' + else: + return default + +def remove_suffix(self: str, suffix: str, /) -> str: + # suffix='' should not call self[:-0]. + if suffix and self.endswith(suffix): + return self[:-len(suffix)] + else: + return self[:] + +def noneStr(s): + """Return string argument, or "" if argument is None. + + Used in converting etree Elements into text. + s - string to convert""" + if s: + return s + return "" + + +def enquote(s): + """Return string argument with surrounding quotes, + for serialization into Python code.""" + if s: + if isinstance(s, str): + return f"'{s}'" + else: + return s + return None + + +class MissingGeneratorOptionsError(RuntimeError): + """Error raised when a Generator tries to do something that requires GeneratorOptions but it is None.""" + + def __init__(self, msg=None): + full_msg = 'Missing generator options object self.genOpts' + if msg: + full_msg += f": {msg}" + super().__init__(full_msg) + + +class ValueInfo(): + """ValueInfo - Class to store parameter/struct member information. + Contains information descripting Vulkan API call parameters and struct members. + + Members: + name - Parameter/struct member name of the value. + base_type - Undecorated typename of the value. + full_type - Fully qualified typename of the value. + pointer_count - Number of '*' characters in the type declaration. + array_length - The parameter that specifies the number of elements in an array, or None if the value is not an array. + array_capacity - The max size of a statically allocated array, or None for a dynamically allocated array. + array_dimension - Number of the array dimension + platform_base_type - For platform specific type definitions, stores the original base_type declaration before platform to trace type substitution. + platform_full_type - For platform specific type definitions, stores the original full_type declaration before platform to trace type substitution. + bitfield_width - + is_pointer - True if the value is a pointer. + is_optional - True if the value is optional + is_array - True if the member is an array. + is_dynamic - True if the memory for the member is an array and it is dynamically allocated. + is_const - True if the member is a const. 
+ """ -class Dx12GeneratorOptions(BaseGeneratorOptions): - """Options for generating C++ function declarations for Dx12 API.""" + def __init__( + self, + name, + base_type, + full_type, + pointer_count=0, + array_length=None, + array_length_value=None, + array_capacity=None, + array_dimension=None, + platform_base_type=None, + platform_full_type=None, + bitfield_width=None, + is_const=False, + is_optional=False, + is_com_outptr=False + ): + self.name = name + self.base_type = base_type + self.full_type = full_type + self.pointer_count = pointer_count + self.array_length = array_length + self.array_length_value = array_length_value + self.array_capacity = array_capacity + self.array_dimension = array_dimension + self.platform_base_type = platform_base_type + self.platform_full_type = platform_full_type + self.bitfield_width = bitfield_width + + self.is_pointer = True if pointer_count > 0 else False + self.is_optional = is_optional + self.is_array = True if array_length else False + self.is_dynamic = True if not array_capacity else False + self.is_const = is_const + self.is_com_outptr = is_com_outptr + +class Dx12GeneratorOptions(): + """Options for generating C++ function declarations for Dx12 API. + + Adds options used by FrameworkGenerator objects during C++ language + code generation. + + Additional members + blacklists - Path to JSON file listing apicalls and structs to ignore. + platform_types - Path to JSON file listing platform (WIN32, X11, etc.) + specific types that are defined outside of the Vulkan header. + + Additional members (from Khronos Registry COptionsGenerator) + prefix_text - list of strings to prefix generated header with + (usually a copyright statement + calling convention macros). + protect_file - True if multiple inclusion protection should be + generated (based on the filename) around the entire header. + apicall - string to use for the function declaration prefix, + such as APICALL on Windows. + apientry - string to use for the calling convention macro, + in typedefs, such as APIENTRY. + apientryp - string to use for the calling convention macro + in function pointer typedefs, such as APIENTRYP. 
+ indent_func_proto - True if prototype declarations should put each + parameter on a separate line + indent_func_pointer - True if typedefed function pointers should put each + parameter on a separate line + align_func_param - if nonzero and parameters are being put on a + separate line, align parameter names at the specified column + """ def __init__( self, @@ -38,13 +183,40 @@ def __init__( protect_file=False, protect_feature=True ): - BaseGeneratorOptions.__init__( - self, blacklists, platform_types, filename, directory, prefix_text, - protect_file, protect_feature - ) - - -class Dx12BaseGenerator(BaseGenerator): + self.blacklists = blacklists + self.platform_types = platform_types + self.filename = filename + self.directory = directory + self.prefix_text = prefix_text + self.protect_file = protect_file + self.protect_feature = protect_feature + self.apicall = '' + self.apientry = '' + self.apientryp = '' + self.indent_func_proto = '' + self.align_func_param = 48 + self.code_generator = True + self.apiname = 'Dx12', + self.conventions = None + self.genpath = None + self.profile = None, + self.versions = None, + self.emitversions = None, + self.default_extensions = None, + self.add_extensions = None, + self.remove_extensions = None, + self.emit_extensions = None + + +class Dx12BaseGenerator(): + + NO_STRUCT_BREAKDOWN = [ + 'LARGE_INTEGER', + 'D3D12_AUTO_BREADCRUMB_NODE', + 'D3D12_AUTO_BREADCRUMB_NODE1', + 'D3D12_DRED_ALLOCATION_NODE', + 'D3D12_DRED_ALLOCATION_NODE1', + ] ARRAY_SIZE_LIST = [ ['D3D12_AUTO_BREADCRUMB_NODE', 'pCommandHistory', 'BreadcrumbCount'], @@ -166,23 +338,246 @@ def __init__( dx12_prefix_strings, err_file=sys.stderr, warn_file=sys.stderr, - diag_file=sys.stdout, - feature_break=True + diag_file=sys.stdout ): - BaseGenerator.__init__( - self, - process_cmds=True, - process_structs=True, - feature_break=feature_break, - err_file=err_file, - warn_file=warn_file, - diag_file=diag_file - ) + self.outFile = None + self.errFile = err_file + self.warnFile = warn_file + self.diagFile = diag_file + # Internal state + self.featureName = None + """The current feature name being generated.""" + + self.genOpts = None + """The GeneratorOptions subclass instance.""" + + self.registry = None + """The specification registry object.""" + + self.featureDictionary = {} + """The dictionary of dictionaries of API features.""" + + # Used for extension enum value generation + self.extBase = 1000000000 + self.extBlockSize = 1000 + self.madeDirs = {} + + # API dictionary, which may be loaded by the beginFile method of + # derived generators. + self.apidict = None + self.source_dict = source_dict self.dx12_prefix_strings = dx12_prefix_strings self.feature_method_params = dict() self.check_blacklist = False + self.structs_with_map_data = dict() + + self.MAP_STRUCT_TYPE = { + 'D3D12_GPU_DESCRIPTOR_HANDLE': [ + 'MapGpuDescriptorHandle', 'MapGpuDescriptorHandles', + 'descriptor_map' + ], + 'D3D12_GPU_VIRTUAL_ADDRESS': + ['MapGpuVirtualAddress', 'MapGpuVirtualAddresses', 'gpu_va_map'] + } + self.dx12_return_value = None + self.dx12_return_decode_type = None + + # These API calls should not be processed by the code generator. They require special implementations. + self.APICALL_BLACKLIST = [] + + self.APICALL_ENCODER_BLACKLIST = [] + + self.APICALL_DECODER_BLACKLIST = [] + + # These method calls should not be processed by the code generator. They require special implementations. + self.METHODCALL_BLACKLIST = [] + + # These structures should not be processed by the code generator. 
They require special implementations. + self.STRUCT_BLACKLIST = [] + + # These structures should be ignored for handle mapping/unwrapping. They require special implementations. + self.STRUCT_MAPPERS_BLACKLIST = [] + + # Platform specific basic types that have been defined extarnally to the Vulkan header. + self.PLATFORM_TYPES = {} + + # Platform specific structure types that have been defined extarnally to the Vulkan header. + self.PLATFORM_STRUCTS = [] + + self.GENERIC_HANDLE_APICALLS = {} + + self.GENERIC_HANDLE_STRUCTS = {} + + # These types represent pointers to non-Vulkan or non-Dx12 objects that were written as 64-bit address IDs. + self.EXTERNAL_OBJECT_TYPES = ['void', 'Void'] + + self.MAP_STRUCT_TYPE = { + 'D3D12_GPU_DESCRIPTOR_HANDLE': [ + 'MapGpuDescriptorHandle', 'MapGpuDescriptorHandles', + 'descriptor_map' + ], + 'D3D12_GPU_VIRTUAL_ADDRESS': + ['MapGpuVirtualAddress', 'MapGpuVirtualAddresses', 'gpu_va_map'] + } + + # Default C++ code indentation size. + self.INDENT_SIZE = 4 + + def __load_blacklists(self, filename): + lists = json.loads(open(filename, 'r').read()) + self.APICALL_BLACKLIST += lists['functions-all'] + self.APICALL_ENCODER_BLACKLIST += lists['functions-encoder'] + self.APICALL_DECODER_BLACKLIST += lists['functions-decoder'] + self.STRUCT_BLACKLIST += lists['structures'] + if 'classmethods' in lists: + for class_name, method_list in lists['classmethods'].items(): + for method_name in method_list: + self.METHODCALL_BLACKLIST.append( + class_name + '_' + method_name + ) + + def __load_platform_types(self, filename): + platforms = json.loads(open(filename, 'r').read()) + for platform_name in platforms: + platform = platforms[platform_name] + platform_types = platform['types'] + + for type in platform_types: + self.PLATFORM_TYPES[type] = platform_types[type] + + platform_structs = platform['structs'] + if platform_structs: + self.PLATFORM_STRUCTS += platform_structs + + def beginFile(self, gen_opts): + + self.genOpts = gen_opts + if self.genOpts is None: + raise MissingGeneratorOptionsError() + self.file_suffix = '' + + # Try to import the API dictionary, apimap.py, if it exists. Nothing + # in apimap.py cannot be extracted directly from the XML, and in the + # future we should do that. + if self.genOpts.genpath is not None: + try: + sys.path.insert(0, self.genOpts.genpath) + import apimap + self.apidict = apimap + except ImportError: + self.apidict = None + + # Open a temporary file for accumulating output. + if self.genOpts.filename is not None: + self.outFile = tempfile.NamedTemporaryFile(mode='w', encoding='utf-8', newline='\n', delete=False) + else: + self.outFile = sys.stdout + + if gen_opts.blacklists: + self.__load_blacklists(gen_opts.blacklists) + if gen_opts.platform_types: + self.__load_platform_types(gen_opts.platform_types) + + # Platform defined struct processing must be implemented manually, + # so these structs will be added to the blacklist. + self.STRUCT_BLACKLIST += self.PLATFORM_STRUCTS + + # User-supplied prefix text, if any (list of strings) + if (gen_opts.prefix_text): + for s in gen_opts.prefix_text: + write(s, file=self.outFile) + + # Multiple inclusion protection & C++ wrappers. 
+ if (gen_opts.protect_file and self.genOpts.filename): + header_sym = 'GFXRECON_' + os.path.basename(self.genOpts.filename).replace('.h', '_H').upper() + write('#ifndef ', header_sym, file=self.outFile) + write('#define ', header_sym, file=self.outFile) + self.newline() + + def endFile(self): + """Method override.""" + # Finish C++ wrapper and multiple inclusion protection + if (self.genOpts.protect_file and self.genOpts.filename): + self.newline() + write('#endif', file=self.outFile) + if self.errFile: + self.errFile.flush() + if self.warnFile: + self.warnFile.flush() + if self.diagFile: + self.diagFile.flush() + if self.outFile: + self.outFile.flush() + if self.outFile != sys.stdout and self.outFile != sys.stderr: + self.outFile.close() + + if self.genOpts is None: + raise MissingGeneratorOptionsError() + + # On successfully generating output, move the temporary file to the + # target file. + if self.genOpts.filename is not None: + directory = Path(self.genOpts.directory) + if sys.platform == 'win32': + if not Path.exists(directory): + os.makedirs(directory) + shutil.copy(self.outFile.name, directory / self.genOpts.filename) + os.remove(self.outFile.name) + self.genOpts = None + + def beginFeature(self, interface, emit): + """Write interface for a feature and tag generated features as having been done. + + - interface - element for the `` / `` to generate + - emit - actually write to the header only when True""" + + # Reset feature specific data sets + self.feature_struct_members = OrderedDict() + self.feature_struct_aliases = OrderedDict() + self.feature_cmd_params = OrderedDict() + + self.emit = emit + if interface is not None: + self.featureName = interface.get('name') + # If there is an additional 'protect' attribute in the feature, save it + self.featureExtraProtect = interface.get('protect') + else: + self.featureName = None + self.featureExtraProtect = None + + def endFeature(self): + """Finish an interface file, closing it when done. + + Derived classes responsible for emitting feature""" + self.featureName = None + self.featureExtraProtect = None + + # + # Indicates that the current feature has C++ code to generate. + # The subclass should override this method. + def need_feature_generation(self): + """Indicates that the current feature has C++ code to generate. + The subclass should override this method.""" + return False + + def generate_feature(self): + """Performs C++ code generation for the feature. + The subclass should override this method.""" + + def newline(self): + """Print a newline to the output file (utility function)""" + write('', file=self.outFile) + + def indent(self, value, spaces): + """Indent all lines in a string. + value - String to indent. + spaces - Number of spaces to indent. + """ + prefix = ' ' * spaces + return '\n'.join([prefix + v if v else v for v in value.split('\n')]) + def clean_type_define(self, type): rtn = '' types = type.split(" ") @@ -373,15 +768,329 @@ def get_value_info(self, param): is_com_outptr=self.is_com_outptr(struct_name, name, full_type) ) - def get_api_prefix(self): - return 'Dx12' + def make_decoded_param_type(self, value): + """Create a type to use for a decoded parameter, using the decoder wrapper types for pointers.""" + type_name = value.base_type + + # is_pointer will be False for static arrays. 
+ if value.is_pointer or value.is_array:
+ count = value.pointer_count
+
+ if self.is_struct(type_name):
+ if (value.array_dimension and value.array_dimension == 1):
+ type_name = 'StructPointerDecoder<Decoded_{}*>'.format(
+ type_name
+ )
+ else:
+ type_name = 'StructPointerDecoder<Decoded_{}>'.format(
+ type_name
+ )
+ elif self.is_class(value):
+ if count == 1:
+ type_name = 'format::HandleId'
+ else:
+ type_name = 'HandlePointerDecoder<{}*>'.format(type_name)
+ elif type_name == 'wchar_t':
+ if count > 1:
+ type_name = 'WStringArrayDecoder'
+ else:
+ type_name = 'WStringDecoder'
+ elif type_name == 'char':
+ if count > 1:
+ type_name = 'StringArrayDecoder'
+ else:
+ type_name = 'StringDecoder'
+ elif type_name == 'void':
+ if value.is_array:
+ if (count > 1):
+ # If this was a pointer to memory (void**) allocated internally by the implementation, it was encoded as
+ # an array of bytes but must be retrieved as a pointer to a memory allocation. For this case, the array
+ # length value defines the size of the memory referenced by the single retrieved pointer.
+ type_name = 'PointerDecoder<uint8_t, void**>'
+ else:
+ # If this was an array (void*) it was encoded as an array of bytes.
+ type_name = 'PointerDecoder<uint8_t>'
+ elif count > 1:
+ # If this was a pointer to a pointer to an unknown object (void**), it was encoded as a pointer to a 64-bit address value.
+ # So, we specify uint64_t as the decode type and void* as the type to be used for Vulkan API call output parameters.
+ type_name = 'PointerDecoder<uint64_t, void*>'
+ else:
+ # If this was a pointer to an unknown object (void*), it was encoded as a 64-bit address value.
+ type_name = 'uint64_t'
+ elif self.is_handle(type_name):
+ type_name = 'HandlePointerDecoder<{}>'.format(type_name)
+ else:
+ if count > 1:
+ type_name = 'PointerDecoder<{}*>'.format(type_name)
+ else:
+ type_name = 'PointerDecoder<{}>'.format(type_name)
+ elif self.is_function_ptr(type_name):
+ # Function pointers are encoded as a 64-bit address value.
+ type_name = 'uint64_t'
+ elif self.is_struct(type_name):
+ type_name = 'Decoded_{}'.format(type_name)
+ elif self.is_handle(type_name):
+ type_name = 'format::HandleId'
+ else:
+ type_name = '{}'.format(type_name)

- def get_prefix_from_type(self, type):
+ return type_name
+
+ def check_struct_member_handles(
+ self,
+ typename,
+ structs_with_handles,
+ structs_with_handle_ptrs=None,
+ ignore_output=False,
+ structs_with_map_data=None,
+ extra_types=None
+ ):
+ """Determines if the specified struct type contains members that have a handle type or are structs that contain handles.
+ Structs with member handles are added to a dictionary, where the key is the structure type and the value is a list of the handle members.
+ An optional list of structure types that contain handle members with pointer types may also be generated.
+ """
+ handles = []
+ has_handle_pointer = False
+ map_data = []
+ for value in self.feature_struct_members[typename]:
+ if self.is_handle(value.base_type) or self.is_class(value) or (
+ extra_types and value.base_type in extra_types
+ ):
+ # The member is a handle.
+ handles.append(value)
+ if (
+ (structs_with_handle_ptrs is not None)
+ and (value.is_pointer or value.is_array)
+ ):
+ has_handle_pointer = True
+ elif self.is_struct(value.base_type) and (
+ (value.base_type in structs_with_handles) and
+ ((not ignore_output) or (not '_Out_' in value.full_type))
+ ):
+ # The member is a struct that contains a handle.
+ handles.append(value) + if ( + (structs_with_handle_ptrs is not None) + and (value.name in structs_with_handle_ptrs) + ): + has_handle_pointer = True + elif self.is_union(value.base_type): + # Check the anonymous union for objects. + union_members = self.get_union_members(value.base_type) + for union_info in union_members: + if self.is_struct( + union_info.base_type + ) and (union_info.base_type in structs_with_handles): + handles.append(value) + has_handle_pointer = True + elif union_info.base_type in self.source_dict['class_dict' + ]: + handles.append(value) + has_handle_pointer = True + elif union_info.base_type in self.MAP_STRUCT_TYPE: + if (structs_with_map_data is not None): + map_data.append(value) + + if (structs_with_map_data is not None) and ( + (value.base_type in self.MAP_STRUCT_TYPE) or + (value.base_type in structs_with_map_data) + ): + map_data.append(value) + + if map_data: + structs_with_map_data[typename] = map_data + + if handles: + # Process the list of struct members a second time to check for + # members with the same type as the struct. The current struct + # type has not been added to the table of structs with handles + # yet, so we must check the struct members a second time, looking + # for members with the struct type, now that we know the current + # struct type contains members that are handles/objects. Any + # struct members that have the same type as the struct must be + # added to the handle member list. + for value in self.feature_struct_members[typename]: + if (value.base_type == typename) and ( + (not ignore_output) or (not '_Out_' in value.full_type) + ): + handles.append(value) + + structs_with_handles[typename] = handles + if (structs_with_handle_ptrs is not None) and has_handle_pointer: + structs_with_handle_ptrs.append(typename) + return True + return False + + def make_consumer_func_decl( + self, return_type, name, values, dx12_method=False + ): + """make_consumer_decl - return VulkanConsumer class member function declaration. + Generate VulkanConsumer class member function declaration. + """ + param_decls = [] + param_decl = self.make_aligned_param_decl( + 'const ApiCallInfo&', 'call_info', self.INDENT_SIZE, + self.genOpts.align_func_param + ) + param_decls.append(param_decl) + + if dx12_method: + param_decl = self.make_aligned_param_decl( + 'format::HandleId', 'object_id', self.INDENT_SIZE, + self.genOpts.align_func_param + ) + param_decls.append(param_decl) + + if return_type != 'void': + method_name = name[name.find('::Process_') + 10:] + return_value = self.get_return_value_info( + return_type, method_name + ) + rtn_type1 = self.make_decoded_param_type(return_value) + if rtn_type1.find('Decoder') != -1: + rtn_type1 += '*' + param_decl = self.make_aligned_param_decl( + rtn_type1, 'return_value', self.INDENT_SIZE, + self.genOpts.align_func_param + ) + param_decls.append(param_decl) + + for value in values: + param_type = self.make_decoded_param_type(value) + + if 'Decoder' in param_type: + param_type = '{}*'.format(param_type) + + param_decl = self.make_aligned_param_decl( + param_type, value.name, self.INDENT_SIZE, + self.genOpts.align_func_param + ) + param_decls.append(param_decl) + + if param_decls: + return 'void {}(\n{})'.format(name, ',\n'.join(param_decls)) + + return 'void {}()'.format(name) + + def make_dump_resources_func_decl( + self, return_type, name, values, is_override + ): + """make_consumer_decl - return VulkanConsumer class member function declaration. + Generate VulkanConsumer class member function declaration. 
+ """ + param_decls = [] + param_decl = self.make_aligned_param_decl( + 'const ApiCallInfo&', 'call_info', self.INDENT_SIZE, + self.genOpts.align_func_param + ) + param_decls.append(param_decl) + + param_decl = self.make_aligned_param_decl( + 'PFN_' + name.rsplit('_', 1)[1], 'func', self.INDENT_SIZE, + self.genOpts.align_func_param + ) + param_decls.append(param_decl) + + if return_type != 'void': + param_decl = self.make_aligned_param_decl( + return_type, 'returnValue', self.INDENT_SIZE, + self.genOpts.align_func_param + ) + param_decls.append(param_decl) + + for value in values: + type_name = value.base_type + + if is_override: + if value.is_pointer or value.is_array: + count = value.pointer_count + if self.is_struct(type_name): + param_type = 'StructPointerDecoder*'.format( + type_name + ) + elif self.is_class(value): + if count == 1: + param_type = type_name[2:] + 'Info*' + else: + param_type = 'HandlePointerDecoder<{}*>'.format(type_name) + else: + param_type = 'const ' + type_name + '*' + else: + param_type = type_name + else: + if value.is_pointer or value.is_array: + count = value.pointer_count + param_type = 'const ' + type_name + '*' + if count > 1: + param_type += ' const *' * (count - 1) + else: + param_type = type_name + + param_decl = self.make_aligned_param_decl( + param_type, value.name, self.INDENT_SIZE, + self.genOpts.align_func_param + ) + param_decls.append(param_decl) + + if param_decls: + return 'void {}(\n{})'.format(name, ',\n'.join(param_decls)) + + return 'void {}()'.format(name) + + def get_api_prefix(self): return 'Dx12' def get_wrapper_prefix_from_type(self): return 'object_wrappers' + def is_resource_dump_class(self): + return True if ('ReplayDumpResources' in self.__class__.__name__) else False + + def is_dump_resources_api_call(self, call_name): + return False + + def get_generic_struct_handle_type_value(self, struct_name, member_name): + """For a struct member that contains a generic handle value, retrieve the struct member + containing an enum value defining the specific handle type. Generic handles have an + integer type such as uint64_t, with an associated enum value defining the specific + type such as VkObjectType. + """ + if struct_name in self.GENERIC_HANDLE_STRUCTS: + struct_entry = self.GENERIC_HANDLE_STRUCTS[struct_name] + if member_name in struct_entry: + return struct_entry[member_name] + return None + + def get_generic_cmd_handle_type_value(self, cmd_name, param_name): + """For an API call parameter that contains a generic handle value, retrieve the parameter + containing an enum value defining the specific handle type. Generic handles have an + integer type such as uint64_t, with an associated enum value defining the specific + type such as VkObjectType. + """ + if cmd_name in self.GENERIC_HANDLE_APICALLS: + cmd_entry = self.GENERIC_HANDLE_APICALLS[cmd_name] + if param_name in cmd_entry: + return cmd_entry[param_name] + return None + + def is_generic_struct_handle_value(self, struct_name, member_name): + """Determine if a struct member contains a generic handle value. Generic handles have an + integer type such as uint64_t, with an associated enum value defining the specific + type such as VkObjectType. + """ + if self.get_generic_struct_handle_type_value(struct_name, member_name): + return True + return False + + def is_generic_cmd_handle_value(self, cmd_name, param_name): + """Determine if an API call parameter contains a generic handle value. 
Generic handles have an + integer type such as uint64_t, with an associated enum value defining the specific + type such as VkObjectType. + """ + if self.get_generic_cmd_handle_type_value(cmd_name, param_name): + return True + return False + def genType(self, typeinfo, name, alias): """Method override.""" self.genStruct(None, None, None) @@ -423,6 +1132,174 @@ def gen_method(self): self.make_value_info(m['parameters']) ) + def is_struct(self, base_type): + """Check for struct type.""" + if ( + (base_type in self.struct_names) + or (base_type in self.PLATFORM_STRUCTS) + ): + return True + return False + + def is_class(self, value): + return False + + def is_handle(self, base_type): + return False + + def has_basetype(self, base_type): + return False + + def get_basetype(self, base_type): + return self.base_types[base_type] + + def is_function_ptr(self, base_type): + """Check for function pointer type.""" + if (base_type[:4] == 'PFN_') or (base_type[-4:] == 'Func'): + return True + return False + + def is_array_len(self, name, values): + """Determine if the value name specifies an array length.""" + for value in values: + if name == value.array_length: + return True + return False + + def get_pointer_count(self, full_type): + """Return the number of '*' in a type declaration.""" + return full_type.count('*') + + def is_input_pointer(self, value): + """Determine if a pointer parameter is an input parameter.""" + if 'const' in value.full_type: + # Vulkan seems to follow a pattern where input pointers will be const and output pointers will not be const. + return True + elif value.platform_base_type and value.base_type == 'void' and value.pointer_count == 1: + # For some extensions, platform specific handles are mapped to the 'void*' type without a const qualifier, + # but need to be treated as an input (eg. if HANDLE is mapped to void*, it should not be treated as an output). + return True + return False + + def is_output_parameter(self, value): + """Determine if a parameter is an output parameter.""" + # Check for an output pointer/array or an in-out pointer. + if ( + (value.is_pointer or value.is_array) + and not self.is_input_pointer(value) + ): + return True + return False + + def get_array_len(self, param): + """Retrieve the length of an array defined by a or element.""" + result = None + len = param.attrib.get('len') + if len: + # Check for a string or array of strings + if 'null-terminated' in len: + if len == 'null-terminated': + paramname = param.find('name') + if (paramname.tail is not None) and ('[' in paramname.tail): + paramenumsizes = param.findall('enum') + for paramenumsize in paramenumsizes: + result = paramenumsize.text + else: + # For string arrays, 'len' can look like 'count,null-terminated', indicating that we have an array of null terminated + # strings. We strip the null-terminated substring from the 'len' field and only return the parameter specifying the string count. 
+ result = len.split(',')[0] + else: + paramname = param.find('name') + # If there is an enum inside "[...]", return the enum + if (paramname.tail is not None) and ('[' in paramname.tail): + result = None + paramenumsizes = param.findall('enum') + for paramenumsize in paramenumsizes: + result = paramenumsize.text + else: + result = len + if result: + result = str(result).replace('::', '->') + else: + # Check for a static array + paramname = param.find('name') + if (paramname.tail is not None) and ('[' in paramname.tail): + paramenumsizes = param.findall('enum') + if paramenumsizes: + first = True + for paramenumsize in paramenumsizes: + if first: + first = False + result = paramenumsize.text + else: + result +=', ' + result += paramenumsize.text + else: + paramsizes = paramname.tail[1:-1].split('][') + sizetokens = [] + for paramsize in paramsizes: + sizetokens.append(paramsize) + result = ', '.join(sizetokens) + return result + + def is_static_array(self, param): + """Check for a static array.""" + name = param.find('name') + if (name.tail is not None) and ('[' in name.tail): + return True + return False + + def get_static_array_len(self, name, params, capacity): + """Determine the length value of a static array (get_array_len() returns the total capacity, not the actual length).""" + # The XML registry does not provide a direct method for determining if a parameter provides the length + # of a static array, but the parameter naming follows a pattern of array name = 'values' and length + # name = 'value_count'. We will search the parameter list for a length parameter using this pattern. + length_name = name[:-1] + 'Count' + for param in params: + if length_name == noneStr(param.find('name').text): + return length_name + + # Not all static arrays have an associated length parameter. These will use capacity as length. 
+ return capacity + + def is_struct_black_listed(self, typename): + """Determines if a struct with the specified typename is blacklisted.""" + if typename in self.STRUCT_BLACKLIST: + return True + return False + + def is_cmd_black_listed(self, name): + """Determines if a function with the specified typename is blacklisted.""" + if name in self.APICALL_BLACKLIST: + return True + if 'Decoder' in self.__class__.__name__ and name in self.APICALL_DECODER_BLACKLIST: + return True + if 'Encoder' in self.__class__.__name__ and name in self.APICALL_ENCODER_BLACKLIST: + return True + return False + + def is_method_black_listed(self, class_name, method_name=None): + """Determines if a method call with the specified typename is blacklisted.""" + combined_name = class_name + if method_name: + combined_name += '_' + method_name + if combined_name in self.METHODCALL_BLACKLIST: + return True + return False + + def is_manually_generated_cmd_name(self, command): + """Determines if a command is in the list of manually generated command names.""" + if self.MANUALLY_GENERATED_COMMANDS is not None and command in self.MANUALLY_GENERATED_COMMANDS: + return True + return False + + def get_filtered_cmd_names(self): + """Retrieves a filtered list of keys from self.feature_cmd_params with blacklisted items removed.""" + return [ + key for key in self.feature_cmd_params + if not self.is_cmd_black_listed(key) + ] + def get_filtered_method_names(self): return [ key for key in self.feature_method_params @@ -620,16 +1497,67 @@ def convert_function(self, type): return e[1] return type + def make_unique_list(self, in_list): + """Return a copy of in_list with duplicates removed, preserving order.""" + out_list = [] + for value in in_list: + if value not in out_list: + out_list.append(value) + return out_list + + def make_arg_list(self, values): + """Create a string containing a comma separated argument list from a list of ValueInfo values. + values - List of ValueInfo objects providing the parameter names for the argument list. + """ + return ', '.join([value.name for value in values]) + + def make_aligned_param_decl( + self, param_type, param_name, indent_column, align_column + ): + """make_aligned_param_decl - return an indented parameter declaration string with the parameter + name aligned to the specified column. + """ + param_decl = ' ' * indent_column + param_decl += param_type + + if align_column: + param_decl = param_decl.ljust(align_column - 1) + + param_decl += ' ' + param_decl += param_name + + return param_decl + def make_invocation_type_name(self, base_type): - """Method override.""" + """Convert a type name to a string to be used as part of an encoder/decoder function/method name.""" type = self.convert_function(base_type) - type = BaseGenerator.make_invocation_type_name(self, type) - if type == 'Function': - type = 'FunctionPtr' - else: - union = self.is_union(type) - if union: - type = 'Union' + if self.is_struct(type): + return type + elif self.is_union(type): + return 'Union' + elif self.is_enum(type): + return 'Enum' + elif type == 'wchar_t': + return 'WString' + elif type == 'char': + return 'String' + elif type == 'Function' or self.is_function_ptr(type): + return 'FunctionPtr' + elif type == 'size_t': + return 'SizeT' + elif type == 'int': + # Extensions use the int type when dealing with file descriptors + return 'Int32' + elif type.endswith('_t'): + if type[0] == 'u': + # For unsigned types, capitalize the first two characters. 
+ return type[0].upper() + type[1].upper( + ) + type[2:-2] + else: + return type[:-2].title() + elif type[0].islower(): + return type.title() + return type def is_required_function_data(self, function_source_data): @@ -694,3 +1622,43 @@ def is_output(self, value): -1) or (value.full_type.find('_Inout') != -1): return True return False + + def make_array_length_expression(self, value, prefix=''): + """Generate an expression for the length of a given array value.""" + length_expr = value.array_length + length_value = value.array_length_value + + if length_value: + if length_value.is_pointer: + # Add implicit dereference when length expr == pointer name + if length_value.name == length_expr: + length_expr = '*' + length_expr + # Add null check to length value behind pointer + length_expr = '({length_value.name} != nullptr) ? ({length_expr}) : 0'.format( + length_value=length_value, length_expr=length_expr + ) + # Add prefix to parameter in the length expression + length_expr = length_expr.replace( + length_value.name, prefix + length_value.name + ) + return length_expr + + def make_array2d_length_expression(self, value, values, prefix=''): + length_exprs = value.array_length.split(',') + if len(length_exprs) == value.pointer_count: + # All dimensions are provided in the xml + lengths = [] + for length_expr in length_exprs: + # Prefix members + for v in values: + length_expr = re.sub( + r'\b({})\b'.format(v.name), r'{}\1'.format(prefix), + length_expr + ) + lengths.append(length_expr) + return lengths + else: + # XML does not provide lengths for all dimensions, instantiate a specialization of ArraySize2D to fetch the sizes + type_list = ', '.join([self.clean_type_define(v.full_type) for v in values]) + arg_list = ', '.join([v.name for v in values]) + return ['ArraySize2D<{}>({})'.format(type_list, arg_list)] diff --git a/framework/generated/dx12_generators/dx12_base_replay_consumer_body_generator.py b/framework/generated/dx12_generators/dx12_base_replay_consumer_body_generator.py new file mode 100644 index 0000000000..bc10b0886a --- /dev/null +++ b/framework/generated/dx12_generators/dx12_base_replay_consumer_body_generator.py @@ -0,0 +1,62 @@ +#!/usr/bin/python3 -i +# +# Copyright (c) 2018-2020 Valve Corporation +# Copyright (c) 2018-2020 LunarG, Inc. +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
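
The consumer declarations generated above align every parameter name to a fixed column, driven by INDENT_SIZE and genOpts.align_func_param (48 by default in Dx12GeneratorOptions). A standalone sketch of that alignment rule, mirroring make_aligned_param_decl():

    # Standalone sketch of the column-alignment rule used by
    # make_aligned_param_decl() above: indent the type, pad it out to the
    # alignment column, then append the parameter name.
    def aligned_param_decl(param_type, param_name, indent_column, align_column):
        decl = ' ' * indent_column + param_type
        if align_column:
            decl = decl.ljust(align_column - 1)
        return decl + ' ' + param_name

    if __name__ == '__main__':
        # Mirrors INDENT_SIZE = 4 and align_func_param = 48 from the options above.
        print(aligned_param_decl('const ApiCallInfo&', 'call_info', 4, 48))
        print(aligned_param_decl('format::HandleId', 'object_id', 4, 48))
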
+ +from dx12_base_generator import write + + +class Dx12BaseReplayConsumerBodyGenerator(): + """Base class for generating Dx12 replay consumers body code.""" + + def generate_feature(self): + """Performs C++ code generation for the feature.""" + platform_type = self.get_api_prefix() + + first = True + for cmd in self.get_filtered_cmd_names(): + + if self.is_resource_dump_class() and self.is_dump_resources_api_call(cmd) == False: + continue + + info = self.feature_cmd_params[cmd] + return_type = info[0] + values = info[2] + + cmddef = '' if first else '\n' + if self.is_resource_dump_class(): + cmddef += self.make_dump_resources_func_decl( + return_type, + '{}ReplayDumpResources::Process_'.format(platform_type) + cmd, + values, cmd in self.DUMP_RESOURCES_OVERRIDES + ) + '\n' + else: + cmddef += self.make_consumer_func_decl( + return_type, + '{}ReplayConsumer::Process_'.format(platform_type) + cmd, + values + ) + '\n' + cmddef += '{\n' + cmddef += self.make_consumer_func_body(return_type, cmd, values) + cmddef += '}' + + write(cmddef, file=self.outFile) + first = False diff --git a/framework/generated/dx12_generators/dx12_base_struct_decoders_body_generator.py b/framework/generated/dx12_generators/dx12_base_struct_decoders_body_generator.py new file mode 100644 index 0000000000..5b5ebe8582 --- /dev/null +++ b/framework/generated/dx12_generators/dx12_base_struct_decoders_body_generator.py @@ -0,0 +1,236 @@ +#!/usr/bin/python3 -i +# +# Copyright (c) 2018-2020 Valve Corporation +# Copyright (c) 2018-2020 LunarG, Inc. +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
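
generate_feature() above only emits Process_ bodies for the commands returned by get_filtered_cmd_names(), which drops anything named in the blacklist JSON loaded by Dx12BaseGenerator.__load_blacklists(). The sketch below shows the shape of a blacklist file implied by the keys that loader reads; the individual entries are made-up examples, not real blacklisted calls or structs.

    # Shape of a blacklist file as implied by the keys read in
    # Dx12BaseGenerator.__load_blacklists() above; every entry below is a
    # made-up example.
    import json

    blacklist = {
        'functions-all': ['D3D12CreateHypotheticalDevice'],
        'functions-encoder': [],
        'functions-decoder': [],
        'structures': ['D3D12_HYPOTHETICAL_DESC'],
        'classmethods': {
            'ID3D12HypotheticalQueue': ['DoSpecialThing']
        }
    }

    print(json.dumps(blacklist, indent=4))
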
+ +from dx12_base_generator import write + + +class Dx12BaseStructDecodersBodyGenerator(): + """Base class for generating dx12 struct docoder body code.""" + + def generate_feature(self): + """Performs C++ code generation for the feature.""" + first = True + for struct in self.get_filtered_struct_names(): + body = '' if first else '\n' + body += 'size_t DecodeStruct(const uint8_t* buffer, size_t buffer_size, Decoded_{}* wrapper)\n'.format( + struct + ) + body += '{\n' + body += ' assert((wrapper != nullptr) && (wrapper->decoded_value != nullptr));\n' + body += '\n' + body += ' size_t bytes_read = 0;\n' + body += ' {}* value = wrapper->decoded_value;\n'.format(struct) + body += '\n' + body += self.make_decode_struct_body( + struct, self.feature_struct_members[struct] + ) + body += '\n' + body += ' return bytes_read;\n' + body += '}' + + write(body, file=self.outFile) + first = False + + def make_decode_struct_body(self, name, values): + """Generate C++ code for the decoder method body.""" + body = '' + + for value in values: + # pNext fields require special treatment and are not processed by type name + if 'pNext' in value.name and value.base_type == 'void': + body += ' bytes_read += DecodePNextStruct((buffer + bytes_read), (buffer_size - bytes_read), &(wrapper->{}));\n'.format( + value.name + ) + body += ' value->pNext = wrapper->pNext ? wrapper->pNext->GetPointer() : nullptr;\n' + else: + body += Dx12BaseStructDecodersBodyGenerator.make_decode_invocation( + self, name, value + ) + + return body + + def make_decode_invocation(self, name, value): + """Generate the struct member decoder function call invocation.""" + buffer_args = '(buffer + bytes_read), (buffer_size - bytes_read)' + + body = '' + + is_struct = False + is_class = False + is_string = False + is_funcp = False + is_handle = False + is_enum = False + + type_name = self.make_invocation_type_name(value.base_type) + + if self.is_struct(type_name): + is_struct = True + elif self.is_class(value): + is_class = True + elif type_name in ['String', 'WString']: + is_string = True + elif type_name == 'FunctionPtr': + is_funcp = True + elif self.is_handle(value.base_type): + is_handle = True + elif type_name == 'Enum': + is_enum = True + + # is_pointer will be False for static arrays. + if value.is_pointer or value.is_array: + if type_name in self.EXTERNAL_OBJECT_TYPES and not value.is_array: + # Pointer to an unknown object type, encoded as a 64-bit integer ID. + body += ' bytes_read += ValueDecoder::DecodeAddress({}, &(wrapper->{}));\n'.format( + buffer_args, value.name + ) + body += ' value->{} = nullptr;\n'.format(value.name) + else: + is_static_array = True if ( + value.is_array and not value.is_dynamic + ) else False + access_op = '.' + + if is_struct: + body += ' wrapper->{} = DecodeAllocator::Allocate<{}>();\n'.format( + value.name, self.make_decoded_param_type(value) + ) + access_op = '->' + + if is_static_array: + array_dimension = '' + # dx12 treats 2d array as 1d array. EX: [8][2] -> [16], so dx12's 2d array needs *. + # But vk keeps 2d array. + if value.array_dimension and value.array_dimension > 0: + array_dimension = '*' + # The pointer decoder will write directly to the struct member's memory. 
+ body += ' wrapper->{name}{}SetExternalMemory({}value->{name}, {arraylen});\n'.format(
+ access_op,
+ array_dimension,
+ name=value.name,
+ arraylen=value.array_capacity
+ )
+
+ if is_struct or is_string or is_handle or (
+ is_class and value.pointer_count > 1
+ ):
+ body += ' bytes_read += wrapper->{}{}Decode({});\n'.format(
+ value.name, access_op, buffer_args
+ )
+ elif is_class and value.pointer_count == 1:
+ body += ' bytes_read += ValueDecoder::DecodeHandleIdValue({}, &(wrapper->{}));\n'.format(
+ buffer_args, value.name
+ )
+ elif self.has_basetype(value.base_type):
+ base_type = self.get_basetype(value.base_type)
+ body += ' bytes_read += wrapper->{}.Decode{}({});\n'.format(
+ value.name, self.encode_types[base_type], buffer_args
+ )
+ else:
+ body += ' bytes_read += wrapper->{}.Decode{}({});\n'.format(
+ value.name, type_name, buffer_args
+ )
+
+ if not is_static_array:
+ if is_handle or is_class:
+ # Point the real struct's member pointer to the handle pointer decoder's handle memory.
+ body += ' value->{} = nullptr;\n'.format(value.name)
+ else:
+ # Point the real struct's member pointer to the pointer decoder's memory.
+ convert_const_cast_begin = ''
+ convert_const_cast_end = ''
+
+ if value.full_type.find('LPCWSTR *') != -1:
+ convert_const_cast_end = ')'
+ convert_const_cast_begin = 'const_cast<LPWSTR*>('
+
+ elif value.full_type.find('LPCSTR *') != -1:
+ convert_const_cast_end = ')'
+ convert_const_cast_begin = 'const_cast<LPSTR*>('
+
+ body += ' value->{name} = {}wrapper->{name}{}GetPointer(){};\n'.format(
+ convert_const_cast_begin,
+ access_op,
+ convert_const_cast_end,
+ name=value.name
+ )
+ else:
+ if is_struct:
+ body += ' wrapper->{} = DecodeAllocator::Allocate<{}>();\n'.format(
+ value.name, self.make_decoded_param_type(value)
+ )
+ body += ' wrapper->{name}->decoded_value = &(value->{name});\n'.format(
+ name=value.name
+ )
+ body += ' bytes_read += DecodeStruct({}, wrapper->{});\n'.format(
+ buffer_args, value.name
+ )
+ elif is_funcp:
+ body += ' bytes_read += ValueDecoder::DecodeAddress({}, &(wrapper->{}));\n'.format(
+ buffer_args, value.name
+ )
+ body += ' value->{} = nullptr;\n'.format(value.name)
+ elif is_handle:
+ body += ' bytes_read += ValueDecoder::DecodeHandleIdValue({}, &(wrapper->{}));\n'.format(
+ buffer_args, value.name
+ )
+ body += ' value->{} = VK_NULL_HANDLE;\n'.format(value.name)
+ elif self.is_generic_struct_handle_value(name, value.name):
+ body += ' bytes_read += ValueDecoder::DecodeUInt64Value({}, &(wrapper->{}));\n'.format(
+ buffer_args, value.name
+ )
+ body += ' value->{} = 0;\n'.format(value.name)
+ elif value.bitfield_width:
+ # Bit fields need to be read into a temporary and then assigned to the struct member.
+ temp_param_name = 'temp_{}'.format(value.name) + body += ' {} {};\n'.format(value.base_type, temp_param_name) + body += ' bytes_read += ValueDecoder::Decode{}Value({}, &{});\n'.format( + type_name, buffer_args, temp_param_name + ) + body += ' value->{} = {};\n'.format( + value.name, temp_param_name + ) + elif is_enum: + body += ' bytes_read += ValueDecoder::DecodeEnumValue({}, &(value->{}));\n'.format( + buffer_args, value.name + ) + elif self.has_basetype(type_name): + base_type = self.get_basetype(type_name) + body += ' bytes_read += ValueDecoder::Decode{}Value({}, &(value->{}));\n'.format( + self.encode_types[base_type], buffer_args, value.name + ) + elif 'Flags' in type_name: + if 'Flags64' in type_name: + body += ' bytes_read += ValueDecoder::DecodeFlags64Value({}, &(value->{}));\n'.format( + buffer_args, value.name + ) + else: + body += ' bytes_read += ValueDecoder::DecodeFlagsValue({}, &(value->{}));\n'.format( + buffer_args, value.name + ) + else: + body += ' bytes_read += ValueDecoder::Decode{}Value({}, &(value->{}));\n'.format( + type_name, buffer_args, value.name + ) + + return body diff --git a/framework/generated/dx12_generators/dx12_base_struct_decoders_header_generator.py b/framework/generated/dx12_generators/dx12_base_struct_decoders_header_generator.py new file mode 100644 index 0000000000..70df9a0d7a --- /dev/null +++ b/framework/generated/dx12_generators/dx12_base_struct_decoders_header_generator.py @@ -0,0 +1,113 @@ +#!/usr/bin/python3 -i +# +# Copyright (c) 2018 Valve Corporation +# Copyright (c) 2018 LunarG, Inc. +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
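
The DecodeStruct() bodies emitted above decode members in declaration order: each Decode call returns the number of bytes it consumed, and bytes_read carries the running offset into the parameter buffer. The sketch below illustrates that accumulation pattern in isolation; the two-uint32 layout is invented for the example and is not the real GFXReconstruct encoding.

    # Illustration of the bytes_read accumulation pattern that the generated
    # DecodeStruct() bodies follow: each field decoder returns how many bytes
    # it consumed, and the running total becomes the offset of the next field.
    import struct

    def decode_uint32(buffer, offset):
        (value,) = struct.unpack_from('<I', buffer, offset)
        return value, 4  # decoded value, bytes consumed

    def decode_hypothetical_struct(buffer):
        bytes_read = 0
        width, consumed = decode_uint32(buffer, bytes_read)
        bytes_read += consumed
        height, consumed = decode_uint32(buffer, bytes_read)
        bytes_read += consumed
        return {'Width': width, 'Height': height}, bytes_read

    if __name__ == '__main__':
        encoded = struct.pack('<II', 1920, 1080)
        print(decode_hypothetical_struct(encoded))
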
+ +from dx12_base_generator import write + + +class Dx12BaseStructDecodersHeaderGenerator(): + """Base class for generating dx12 struct decoder header code.""" + + def generate_feature(self): + """Performs C++ code generation for the feature.""" + first = True + for struct in self.get_filtered_struct_names(): + body = '' if first else '\n' + body += 'struct Decoded_{}\n'.format(struct) + body += '{\n' + body += ' using struct_type = {};\n'.format(struct) + body += '\n' + body += ' {}* decoded_value{{ nullptr }};\n'.format(struct) + + decls = self.make_member_declarations( + struct, self.feature_struct_members[struct] + ) + if decls: + body += '\n' + body += decls + + body += '};' + + write(body, file=self.outFile) + first = False + + # Write typedefs for any aliases + for struct in self.feature_struct_aliases: + body = '' if first else '\n' + body += 'typedef Decoded_{} Decoded_{};'.format( + self.feature_struct_aliases[struct], struct + ) + write(body, file=self.outFile) + first = False + + def needs_member_declaration(self, name, value): + """Determines if a Vulkan struct member needs an associated member + delcaration in the decoded struct wrapper. + """ + if value.is_pointer or value.is_array: + return True + elif self.is_function_ptr(value.base_type): + return True + elif self.is_handle(value.base_type): + return True + elif self.is_struct(value.base_type): + return True + elif self.is_generic_struct_handle_value(name, value.name): + return True + return False + + def get_default_init_value(self, type): + """Determines if the struct member requires default initalization and + determines the value to use. + """ + if type == 'format::HandleId': + # These types represent values recorded for Vulkan handles. + return 'format::kNullHandleId' + elif type == 'uint64_t': + # These types represent values recorded for function pointers and void pointers to non-Vulkan objects. + return '0' + return None + + def make_member_declarations(self, name, values): + """Generate the struct member declarations for the decoded struct wrapper.""" + body = '' + + for value in values: + if self.needs_member_declaration(name, value): + type_name = self.make_decoded_param_type(value) + if self.is_struct(value.base_type): + type_name = '{}*'.format(type_name) + + default_value = self.get_default_init_value(type_name) + if default_value: + body += ' {} {}{{ {} }};\n'.format( + type_name, value.name, default_value + ) + else: + if self.is_struct(value.base_type): + body += ' {} {}{{ nullptr }};\n'.format( + type_name, value.name + ) + else: + body += ' {} {};\n'.format(type_name, value.name) + + return body diff --git a/framework/generated/dx12_generators/dx12_call_id_to_string_header_generator.py b/framework/generated/dx12_generators/dx12_call_id_to_string_header_generator.py index 2e6ef16172..3702de5bac 100644 --- a/framework/generated/dx12_generators/dx12_call_id_to_string_header_generator.py +++ b/framework/generated/dx12_generators/dx12_call_id_to_string_header_generator.py @@ -22,8 +22,7 @@ # IN THE SOFTWARE. 
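
The header generator above wraps each structure in a Decoded_ struct that pairs a pointer to the raw value with decoder members for pointers, handles, and nested structs. A standalone sketch of that emission, using a made-up struct name and member list (the real generator derives both from the parsed headers and from make_decoded_param_type()):

    # Standalone sketch of the Decoded_ wrapper emission performed by
    # Dx12BaseStructDecodersHeaderGenerator above; the struct and members
    # below are hypothetical.
    def make_decoded_wrapper(struct_name, members):
        # members: list of (decoded_type, member_name, default_value_or_None)
        body = 'struct Decoded_{}\n'.format(struct_name)
        body += '{\n'
        body += '    using struct_type = {};\n\n'.format(struct_name)
        body += '    {}* decoded_value{{ nullptr }};\n\n'.format(struct_name)
        for decoded_type, member_name, default in members:
            if default is not None:
                body += '    {} {}{{ {} }};\n'.format(decoded_type, member_name, default)
            else:
                body += '    {} {};\n'.format(decoded_type, member_name)
        body += '};'
        return body

    if __name__ == '__main__':
        print(make_decoded_wrapper('D3D12_HYPOTHETICAL_DESC', [
            ('format::HandleId', 'Resource', 'format::kNullHandleId'),
            ('PointerDecoder<uint32_t>', 'pCounts', None),
        ]))
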
import sys -from base_generator import BaseGenerator, write -from dx12_base_generator import Dx12BaseGenerator +from dx12_base_generator import Dx12BaseGenerator, write class Dx12CallIdToStringHeaderGenerator(Dx12BaseGenerator): @@ -44,7 +43,7 @@ def __init__( ) def beginFile(self, gen_opts): - BaseGenerator.beginFile(self, gen_opts) + Dx12BaseGenerator.beginFile(self, gen_opts) self.write_include() write('GFXRECON_BEGIN_NAMESPACE(gfxrecon)', file=self.outFile) @@ -115,4 +114,4 @@ def endFile(self): write('GFXRECON_END_NAMESPACE(gfxrecon)', file=self.outFile) # Finish processing in superclass - BaseGenerator.endFile(self) + Dx12BaseGenerator.endFile(self) diff --git a/framework/generated/dx12_generators/dx12_command_list_util_body_generator.py b/framework/generated/dx12_generators/dx12_command_list_util_body_generator.py index 3831feefdf..81833a7676 100644 --- a/framework/generated/dx12_generators/dx12_command_list_util_body_generator.py +++ b/framework/generated/dx12_generators/dx12_command_list_util_body_generator.py @@ -21,7 +21,7 @@ # IN THE SOFTWARE. import sys -from base_generator import write +from dx12_base_generator import write from dx12_command_list_util_header_generator import Dx12CommandListUtilHeaderGenerator diff --git a/framework/generated/dx12_generators/dx12_command_list_util_header_generator.py b/framework/generated/dx12_generators/dx12_command_list_util_header_generator.py index 46683284bd..049c2c3156 100644 --- a/framework/generated/dx12_generators/dx12_command_list_util_header_generator.py +++ b/framework/generated/dx12_generators/dx12_command_list_util_header_generator.py @@ -21,8 +21,7 @@ # IN THE SOFTWARE. import sys -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator +from dx12_base_generator import Dx12BaseGenerator, write class Dx12CommandListUtilHeaderGenerator(Dx12BaseGenerator): diff --git a/framework/generated/dx12_generators/dx12_consumer_header_generator.py b/framework/generated/dx12_generators/dx12_consumer_header_generator.py index 778bd54255..04044a8155 100644 --- a/framework/generated/dx12_generators/dx12_consumer_header_generator.py +++ b/framework/generated/dx12_generators/dx12_consumer_header_generator.py @@ -21,8 +21,7 @@ # IN THE SOFTWARE. import sys -from base_generator import BaseGenerator, write -from dx12_base_generator import Dx12BaseGenerator, Dx12GeneratorOptions +from dx12_base_generator import Dx12BaseGenerator, Dx12GeneratorOptions, write class Dx12ConsumerHeaderGeneratorOptions(Dx12GeneratorOptions): @@ -65,7 +64,7 @@ def __init__( def beginFile(self, gen_opts): """Method override.""" - BaseGenerator.beginFile(self, gen_opts) + Dx12BaseGenerator.beginFile(self, gen_opts) if gen_opts.constructor_args: self.constructor_args = gen_opts.constructor_args @@ -256,4 +255,4 @@ def endFile(self): write('GFXRECON_END_NAMESPACE(gfxrecon)', file=self.outFile) # Finish processing in superclass - BaseGenerator.endFile(self) + Dx12BaseGenerator.endFile(self) diff --git a/framework/generated/dx12_generators/dx12_decoder_body_generator.py b/framework/generated/dx12_generators/dx12_decoder_body_generator.py index dee7da4737..adbea98791 100644 --- a/framework/generated/dx12_generators/dx12_decoder_body_generator.py +++ b/framework/generated/dx12_generators/dx12_decoder_body_generator.py @@ -21,29 +21,17 @@ # IN THE SOFTWARE. 
import sys -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator +from dx12_base_generator import Dx12BaseGenerator, write from dx12_decoder_header_generator import Dx12DecoderHeaderGenerator -from base_struct_decoders_body_generator import BaseStructDecodersBodyGenerator -from base_decoder_body_generator import BaseDecoderBodyGenerator +from dx12_base_struct_decoders_body_generator import Dx12BaseStructDecodersBodyGenerator +from dx12_base_decoder_body_generator import Dx12BaseDecoderBodyGenerator class Dx12DecoderBodyGenerator( - Dx12DecoderHeaderGenerator, BaseStructDecodersBodyGenerator, - BaseDecoderBodyGenerator + Dx12DecoderHeaderGenerator, Dx12BaseStructDecodersBodyGenerator, + Dx12BaseDecoderBodyGenerator ): """Generates C++ functions responsible for decoding Dx12 API calls.""" - def __init__(self, source_dict, dx12_prefix_strings, diag_file): - Dx12DecoderHeaderGenerator.__init__(self, source_dict, dx12_prefix_strings, - err_file=sys.stderr, - warn_file=sys.stderr, - diag_file=sys.stdout, - feature_break=False - ) - - BaseStructDecodersBodyGenerator.__init__(self) - BaseDecoderBodyGenerator.__init__(self) - def write_include(self): """Method override.""" @@ -58,11 +46,11 @@ def write_include(self): def generate_feature(self): """Method override.""" + self.cmd_names = [] self.method_names = [] Dx12BaseGenerator.generate_feature(self) self.write_function_call() - BaseDecoderBodyGenerator.generate_feature(self) - BaseDecoderBodyGenerator.generate_commands(self) # Dx12 only has one "feature", so no need to push this to endFile + Dx12BaseDecoderBodyGenerator.generate_feature(self) self.newline() self.generate_dx12_method_feature() diff --git a/framework/generated/dx12_generators/dx12_decoder_header_generator.py b/framework/generated/dx12_generators/dx12_decoder_header_generator.py index 68447e9e28..e24d920f37 100644 --- a/framework/generated/dx12_generators/dx12_decoder_header_generator.py +++ b/framework/generated/dx12_generators/dx12_decoder_header_generator.py @@ -21,13 +21,12 @@ # IN THE SOFTWARE. 
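
Dx12DecoderBodyGenerator above is composed from Dx12DecoderHeaderGenerator plus the ported struct-decoder and decoder-body mixins; the mixins hold no state of their own and are invoked explicitly through self, so the concrete generator supplies the shared attributes such as outFile and cmd_names. A simplified illustration of that layout, with stand-in class and attribute names:

    # Simplified illustration of the mixin layout used by
    # Dx12DecoderBodyGenerator above; the classes below are toy stand-ins,
    # not the real generator classes.
    class CommandBodyMixin:
        def generate_commands(self):
            return ['// Decode_{} body'.format(cmd) for cmd in self.cmd_names]

    class StructBodyMixin:
        def generate_structs(self):
            return ['// DecodeStruct body for {}'.format(s) for s in self.struct_names]

    class ToyDecoderBodyGenerator(CommandBodyMixin, StructBodyMixin):
        def __init__(self):
            self.cmd_names = ['D3D12CreateDevice']
            self.struct_names = ['D3D12_HYPOTHETICAL_DESC']

        def generate_feature(self):
            # Explicit delegation, mirroring how the real generate_feature()
            # calls Dx12BaseDecoderBodyGenerator.generate_feature(self).
            lines = CommandBodyMixin.generate_commands(self)
            lines += StructBodyMixin.generate_structs(self)
            return '\n'.join(lines)

    if __name__ == '__main__':
        print(ToyDecoderBodyGenerator().generate_feature())
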
import sys -from base_generator import BaseGenerator, write -from dx12_base_generator import Dx12BaseGenerator -from base_struct_decoders_header_generator import BaseStructDecodersHeaderGenerator +from dx12_base_generator import Dx12BaseGenerator, write +from dx12_base_struct_decoders_header_generator import Dx12BaseStructDecodersHeaderGenerator class Dx12DecoderHeaderGenerator( - Dx12BaseGenerator, BaseStructDecodersHeaderGenerator + Dx12BaseGenerator, Dx12BaseStructDecodersHeaderGenerator ): """Generates C++ functions responsible for decoding Dx12 API calls.""" @@ -37,17 +36,16 @@ def __init__( dx12_prefix_strings, err_file=sys.stderr, warn_file=sys.stderr, - diag_file=sys.stdout, - feature_break=True + diag_file=sys.stdout ): Dx12BaseGenerator.__init__( self, source_dict, dx12_prefix_strings, err_file, warn_file, - diag_file, feature_break=feature_break + diag_file ) def beginFile(self, gen_opts): """Method override.""" - BaseGenerator.beginFile(self, gen_opts) + Dx12BaseGenerator.beginFile(self, gen_opts) self.write_include() write('GFXRECON_BEGIN_NAMESPACE(gfxrecon)', file=self.outFile) @@ -164,4 +162,4 @@ def endFile(self): write('GFXRECON_END_NAMESPACE(gfxrecon)', file=self.outFile) # Finish processing in superclass - BaseGenerator.endFile(self) + Dx12BaseGenerator.endFile(self) diff --git a/framework/generated/dx12_generators/dx12_enum_to_json_header_generator.py b/framework/generated/dx12_generators/dx12_enum_to_json_header_generator.py index b960024e61..c820097a45 100644 --- a/framework/generated/dx12_generators/dx12_enum_to_json_header_generator.py +++ b/framework/generated/dx12_generators/dx12_enum_to_json_header_generator.py @@ -22,8 +22,7 @@ # IN THE SOFTWARE. import sys -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator +from dx12_base_generator import Dx12BaseGenerator, write from dx12_enum_to_string_header_generator import Dx12EnumToStringHeaderGenerator from reformat_code import format_cpp_code diff --git a/framework/generated/dx12_generators/dx12_enum_to_string_body_generator.py b/framework/generated/dx12_generators/dx12_enum_to_string_body_generator.py index a17385d481..6db9813e82 100644 --- a/framework/generated/dx12_generators/dx12_enum_to_string_body_generator.py +++ b/framework/generated/dx12_generators/dx12_enum_to_string_body_generator.py @@ -22,8 +22,7 @@ # IN THE SOFTWARE. import sys -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator +from dx12_base_generator import Dx12BaseGenerator, write from dx12_enum_to_string_header_generator import Dx12EnumToStringHeaderGenerator # For the list of substrings in bitflag class Dx12EnumToStringBodyGenerator(Dx12BaseGenerator): diff --git a/framework/generated/dx12_generators/dx12_enum_to_string_header_generator.py b/framework/generated/dx12_generators/dx12_enum_to_string_header_generator.py index 99386fd9d5..65e6bc5b34 100644 --- a/framework/generated/dx12_generators/dx12_enum_to_string_header_generator.py +++ b/framework/generated/dx12_generators/dx12_enum_to_string_header_generator.py @@ -22,8 +22,7 @@ # IN THE SOFTWARE. 
import sys, inspect -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator +from dx12_base_generator import Dx12BaseGenerator, write class Dx12EnumToStringHeaderGenerator(Dx12BaseGenerator): """TODO : Generates C++ functions responsible for Convert to texts.""" diff --git a/framework/generated/dx12_generators/dx12_json_consumer_body_generator.py b/framework/generated/dx12_generators/dx12_json_consumer_body_generator.py index 767e87c5d4..6eba08c2c7 100644 --- a/framework/generated/dx12_generators/dx12_json_consumer_body_generator.py +++ b/framework/generated/dx12_generators/dx12_json_consumer_body_generator.py @@ -22,8 +22,7 @@ # IN THE SOFTWARE. import json -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator, Dx12GeneratorOptions +from dx12_base_generator import Dx12BaseGenerator, Dx12GeneratorOptions, write from dx12_json_consumer_header_generator import Dx12JsonConsumerHeaderGenerator, Dx12JsonConsumerHeaderGeneratorOptions from dx12_json_common_generator import Dx12JsonCommonGenerator from reformat_code import format_cpp_code, remove_leading_empty_lines diff --git a/framework/generated/dx12_generators/dx12_json_consumer_header_generator.py b/framework/generated/dx12_generators/dx12_json_consumer_header_generator.py index 3e66132e34..a37a090ba8 100644 --- a/framework/generated/dx12_generators/dx12_json_consumer_header_generator.py +++ b/framework/generated/dx12_generators/dx12_json_consumer_header_generator.py @@ -21,8 +21,7 @@ # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator +from dx12_base_generator import Dx12BaseGenerator, write from dx12_consumer_header_generator import Dx12ConsumerHeaderGenerator, Dx12ConsumerHeaderGeneratorOptions diff --git a/framework/generated/dx12_generators/dx12_replay_consumer_body_generator.py b/framework/generated/dx12_generators/dx12_replay_consumer_body_generator.py index 37b728423b..ce13807bbc 100644 --- a/framework/generated/dx12_generators/dx12_replay_consumer_body_generator.py +++ b/framework/generated/dx12_generators/dx12_replay_consumer_body_generator.py @@ -23,10 +23,9 @@ import json import sys import re -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator, Dx12GeneratorOptions +from dx12_base_generator import Dx12BaseGenerator, Dx12GeneratorOptions, write from dx12_replay_consumer_header_generator import Dx12ReplayConsumerHeaderGenerator, Dx12ReplayConsumerHeaderGeneratorOptions -from base_replay_consumer_body_generator import BaseReplayConsumerBodyGenerator +from dx12_base_replay_consumer_body_generator import Dx12BaseReplayConsumerBodyGenerator class Dx12ReplayConsumerBodyGeneratorOptions( @@ -54,7 +53,7 @@ def __init__( class Dx12ReplayConsumerBodyGenerator( - BaseReplayConsumerBodyGenerator, Dx12ReplayConsumerHeaderGenerator + Dx12BaseReplayConsumerBodyGenerator, Dx12ReplayConsumerHeaderGenerator ): """Generates C++ functions responsible for consuming Dx12 API calls.""" @@ -135,7 +134,7 @@ def generate_feature(self): header_dict ) Dx12BaseGenerator.generate_feature(self) - BaseReplayConsumerBodyGenerator.generate_feature(self) + Dx12BaseReplayConsumerBodyGenerator.generate_feature(self) self.generate_dx12_method_feature() def generate_dx12_method_feature(self): diff --git a/framework/generated/dx12_generators/dx12_replay_consumer_header_generator.py 
b/framework/generated/dx12_generators/dx12_replay_consumer_header_generator.py index 28ccf21d90..f176e55174 100644 --- a/framework/generated/dx12_generators/dx12_replay_consumer_header_generator.py +++ b/framework/generated/dx12_generators/dx12_replay_consumer_header_generator.py @@ -20,8 +20,7 @@ # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator, Dx12GeneratorOptions +from dx12_base_generator import Dx12BaseGenerator, Dx12GeneratorOptions, write from dx12_consumer_header_generator import Dx12ConsumerHeaderGenerator, Dx12ConsumerHeaderGeneratorOptions diff --git a/framework/generated/dx12_generators/dx12_state_table_header_generator.py b/framework/generated/dx12_generators/dx12_state_table_header_generator.py index 0fa3afaf44..f3f71ce19f 100644 --- a/framework/generated/dx12_generators/dx12_state_table_header_generator.py +++ b/framework/generated/dx12_generators/dx12_state_table_header_generator.py @@ -21,8 +21,7 @@ # IN THE SOFTWARE. import sys -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator +from dx12_base_generator import Dx12BaseGenerator, write # Generates declarations for functions for DX12 state table diff --git a/framework/generated/dx12_generators/dx12_struct_decoders_body_generator.py b/framework/generated/dx12_generators/dx12_struct_decoders_body_generator.py index 1936d999c1..de35fc0c20 100644 --- a/framework/generated/dx12_generators/dx12_struct_decoders_body_generator.py +++ b/framework/generated/dx12_generators/dx12_struct_decoders_body_generator.py @@ -21,16 +21,15 @@ # IN THE SOFTWARE. import sys -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator +from dx12_base_generator import Dx12BaseGenerator, write from dx12_decoder_header_generator import Dx12DecoderHeaderGenerator -from base_struct_decoders_body_generator import BaseStructDecodersBodyGenerator -from base_decoder_body_generator import BaseDecoderBodyGenerator +from dx12_base_struct_decoders_body_generator import Dx12BaseStructDecodersBodyGenerator +from dx12_base_decoder_body_generator import Dx12BaseDecoderBodyGenerator class Dx12StructDecodersBodyGenerator( - Dx12DecoderHeaderGenerator, BaseStructDecodersBodyGenerator, - BaseDecoderBodyGenerator + Dx12DecoderHeaderGenerator, Dx12BaseStructDecodersBodyGenerator, + Dx12BaseDecoderBodyGenerator ): """Generates C++ functions responsible for decoding Dx12 API calls.""" @@ -61,4 +60,4 @@ def write_include(self): # Met#include "util/defines.h"hod override def generate_feature(self): Dx12BaseGenerator.generate_feature(self) - BaseStructDecodersBodyGenerator.generate_feature(self) + Dx12BaseStructDecodersBodyGenerator.generate_feature(self) diff --git a/framework/generated/dx12_generators/dx12_struct_decoders_forward_generator.py b/framework/generated/dx12_generators/dx12_struct_decoders_forward_generator.py index f390ff2eb3..bf3c6e5c3a 100644 --- a/framework/generated/dx12_generators/dx12_struct_decoders_forward_generator.py +++ b/framework/generated/dx12_generators/dx12_struct_decoders_forward_generator.py @@ -21,13 +21,12 @@ # IN THE SOFTWARE. 
import sys -from base_generator import BaseGenerator, write -from dx12_base_generator import Dx12BaseGenerator -from base_struct_decoders_header_generator import BaseStructDecodersHeaderGenerator +from dx12_base_generator import Dx12BaseGenerator, write +from dx12_base_struct_decoders_header_generator import Dx12BaseStructDecodersHeaderGenerator class Dx12StructDecodersForwardGenerator( - Dx12BaseGenerator, BaseStructDecodersHeaderGenerator + Dx12BaseGenerator, Dx12BaseStructDecodersHeaderGenerator ): """Generates C++ functions responsible for decoding Dx12 API calls.""" @@ -47,7 +46,7 @@ def __init__( def beginFile(self, gen_opts): """Method override.""" - BaseGenerator.beginFile(self, gen_opts) + Dx12BaseGenerator.beginFile(self, gen_opts) self.write_include() write('GFXRECON_BEGIN_NAMESPACE(gfxrecon)', file=self.outFile) @@ -111,4 +110,4 @@ def endFile(self): write('GFXRECON_END_NAMESPACE(gfxrecon)', file=self.outFile) # Finish processing in superclass - BaseGenerator.endFile(self) + Dx12BaseGenerator.endFile(self) diff --git a/framework/generated/dx12_generators/dx12_struct_decoders_header_generator.py b/framework/generated/dx12_generators/dx12_struct_decoders_header_generator.py index a97c837e21..c3b1765b28 100644 --- a/framework/generated/dx12_generators/dx12_struct_decoders_header_generator.py +++ b/framework/generated/dx12_generators/dx12_struct_decoders_header_generator.py @@ -21,13 +21,12 @@ # IN THE SOFTWARE. import sys -from base_generator import BaseGenerator, write -from dx12_base_generator import Dx12BaseGenerator -from base_struct_decoders_header_generator import BaseStructDecodersHeaderGenerator +from dx12_base_generator import Dx12BaseGenerator, write +from dx12_base_struct_decoders_header_generator import Dx12BaseStructDecodersHeaderGenerator class Dx12StructDecodersHeaderGenerator( - Dx12BaseGenerator, BaseStructDecodersHeaderGenerator + Dx12BaseGenerator, Dx12BaseStructDecodersHeaderGenerator ): """Generates C++ functions responsible for decoding Dx12 API calls.""" @@ -47,7 +46,7 @@ def __init__( def beginFile(self, gen_opts): """Method override.""" - BaseGenerator.beginFile(self, gen_opts) + Dx12BaseGenerator.beginFile(self, gen_opts) self.write_include() write('GFXRECON_BEGIN_NAMESPACE(gfxrecon)', file=self.outFile) @@ -57,7 +56,7 @@ def beginFile(self, gen_opts): def generate_feature(self): """Method override.""" Dx12BaseGenerator.generate_feature(self) - BaseStructDecodersHeaderGenerator.generate_feature(self) + Dx12BaseStructDecodersHeaderGenerator.generate_feature(self) def write_include(self): """Method override.""" @@ -85,4 +84,4 @@ def endFile(self): write('GFXRECON_END_NAMESPACE(gfxrecon)', file=self.outFile) # Finish processing in superclass - BaseGenerator.endFile(self) + Dx12BaseGenerator.endFile(self) diff --git a/framework/generated/dx12_generators/dx12_struct_decoders_to_json_body_generator.py b/framework/generated/dx12_generators/dx12_struct_decoders_to_json_body_generator.py index cea4eebaa6..e540dc90de 100644 --- a/framework/generated/dx12_generators/dx12_struct_decoders_to_json_body_generator.py +++ b/framework/generated/dx12_generators/dx12_struct_decoders_to_json_body_generator.py @@ -52,8 +52,7 @@ ## import sys -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator +from dx12_base_generator import Dx12BaseGenerator, write from dx12_json_common_generator import Dx12JsonCommonGenerator from reformat_code import format_cpp_code diff --git 
a/framework/generated/dx12_generators/dx12_struct_decoders_to_json_header_generator.py b/framework/generated/dx12_generators/dx12_struct_decoders_to_json_header_generator.py index 0114ae51cd..3595664d5b 100644 --- a/framework/generated/dx12_generators/dx12_struct_decoders_to_json_header_generator.py +++ b/framework/generated/dx12_generators/dx12_struct_decoders_to_json_header_generator.py @@ -21,8 +21,7 @@ # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. import sys, inspect -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator +from dx12_base_generator import Dx12BaseGenerator, write from reformat_code import format_cpp_code, indent_cpp_code, remove_trailing_empty_lines class Dx12StructDecodersToJsonHeaderGenerator(Dx12BaseGenerator): diff --git a/framework/generated/dx12_generators/dx12_struct_object_mappers_body_generator.py b/framework/generated/dx12_generators/dx12_struct_object_mappers_body_generator.py index 2d6aaefe0f..22c5314c7b 100644 --- a/framework/generated/dx12_generators/dx12_struct_object_mappers_body_generator.py +++ b/framework/generated/dx12_generators/dx12_struct_object_mappers_body_generator.py @@ -21,14 +21,10 @@ # IN THE SOFTWARE. import sys -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator -from base_struct_handle_mappers_body_generator import BaseStructHandleMappersBodyGenerator +from dx12_base_generator import Dx12BaseGenerator, write -class Dx12StructObjectMappersBodyGenerator( - BaseStructHandleMappersBodyGenerator, Dx12BaseGenerator -): +class Dx12StructObjectMappersBodyGenerator(Dx12BaseGenerator): """Generates C++ functions responsible for decoding Dx12 API calls.""" def __init__( @@ -96,7 +92,31 @@ def beginFile(self, gen_opts): def endFile(self): """Method override.""" - BaseStructHandleMappersBodyGenerator.endFile(self) + # Generate handle adding functions for output structs with handles + for struct in self.output_structs_with_handles: + self.newline() + write( + self.make_struct_handle_additions( + struct, self.structs_with_handles[struct] + ), + file=self.outFile + ) + + # Generate handle memory allocation functions for output structs with handles + for struct in self.output_structs_with_handles: + if struct in self.structs_with_handle_ptrs: + self.newline() + write( + self.make_struct_handle_allocations( + struct, self.structs_with_handles[struct] + ), + file=self.outFile + ) + + self.newline() + write('GFXRECON_END_NAMESPACE(decode)', file=self.outFile) + write('GFXRECON_END_NAMESPACE(gfxrecon)', file=self.outFile) + # Finish processing in superclass Dx12BaseGenerator.endFile(self) @@ -120,7 +140,72 @@ def generate_feature(self): # Functions should not be generated for structs on the blacklist. 
self.check_blacklist = True - BaseStructHandleMappersBodyGenerator.generate_feature(self) + + platform_type = 'Dx12' + map_types = 'Objects' + map_table = ', const graphics::Dx12GpuVaMap& gpu_va_map' + + for struct in self.get_filtered_struct_names(): + if ( + (struct in self.structs_with_handles) + or (struct in self.GENERIC_HANDLE_STRUCTS) + or (struct in self.structs_with_map_data) + ) and (struct not in self.STRUCT_MAPPERS_BLACKLIST): + handle_members = list() + generic_handle_members = dict() + + if struct in self.structs_with_handles: + handle_members = self.structs_with_handles[struct].copy() + + if struct in self.structs_with_map_data: + handle_members.extend( + self.structs_with_map_data[struct].copy() + ) + + if struct in self.GENERIC_HANDLE_STRUCTS: + generic_handle_members = self.GENERIC_HANDLE_STRUCTS[struct + ] + + # Determine if the struct only contains members that are structs that contain handles or static arrays of handles, + # and does not need a temporary variable referencing the struct value. + needs_value_ptr = False + if generic_handle_members: + needs_value_ptr = True + else: + for member in handle_members: + if ( + ( + self.is_handle(member.base_type) + or self.is_class(member) + ) and + not (member.is_array and not member.is_dynamic) + ) or (member.base_type in self.MAP_STRUCT_TYPE): + needs_value_ptr = True + break + + body = '\n' + body += 'void MapStruct{}(Decoded_{}* wrapper, const {}ObjectInfoTable& object_info_table{})\n'.format( + map_types, struct, platform_type, map_table + ) + body += '{\n' + + if not needs_value_ptr: + body += ' if (wrapper != nullptr)\n' + body += ' {' + else: + body += ' if ((wrapper != nullptr) && (wrapper->decoded_value != nullptr))\n' + body += ' {\n' + body += ' {}* value = wrapper->decoded_value;\n'.format( + struct + ) + + body += self.make_struct_handle_mappings( + struct, handle_members, generic_handle_members + ) + body += ' }\n' + body += '}' + + write(body, file=self.outFile) header_dict = self.source_dict['header_dict'] self.structs_with_objects = self.collect_struct_with_objects( @@ -151,3 +236,233 @@ def write_struct_member_def(self): expr += '}\n' write(expr, file=self.outFile) + + def make_struct_handle_mappings( + self, name, handle_members, generic_handle_members + ): + """Generating expressions for mapping struct handles read from the capture file to handles created at replay.""" + map_types = 'Objects' + map_type = 'Object' + base_type = 'object' + object_info_table_get = '' + given_object = ', gpu_va_map' + + body = '' + for member in handle_members: + body += '\n' + map_func = self.MAP_STRUCT_TYPE.get(member.base_type) + + if map_func: + if member.is_array: + body += ' if ({0})\n'\ + ' {{\n'\ + ' object_mapping::{2}(value->{0}->GetPointer(), {1}, {3});\n'\ + ' }}\n'.format( + member.name, member.array_length, map_func[1], map_func[2] + ) + else: + if member.is_pointer: + body += ' if ({0})\n'\ + ' {{\n'\ + ' object_mapping::{1}(value->{0}->GetPointer(), {2});\n'\ + ' }}\n'.format( + member.name, map_func[0], map_func[2] + ) + else: + body += ' object_mapping::{}(value->{}, {});\n'.format( + map_func[0], member.name, map_func[2] + ) + elif self.is_struct(member.base_type): + # This is a struct that includes handles. 
+ if member.is_array: + body += ' MapStructArray{}(wrapper->{name}->GetMetaStructPointer(), wrapper->{name}->GetLength(), object_info_table{});\n'.format( + map_types, + member.base_type, + given_object, + name=member.name + ) + elif member.is_pointer: + body += ' MapStructArray{}(wrapper->{}->GetMetaStructPointer(), 1, object_info_table{});\n'.format( + map_types, member.base_type, member.name, given_object + ) + else: + body += ' MapStruct{}(wrapper->{}, object_info_table{});\n'.format( + map_types, member.name, given_object + ) + else: + type = member.base_type + + # If it is an array or pointer, map with the utility function. + if (member.is_array or member.pointer_count > 1): + if member.is_dynamic or member.is_pointer: + body += ' value->{name} = {}_mapping::Map{}Array<{type}>(&wrapper->{name}, object_info_table{});\n'.format( + base_type, + map_type, + object_info_table_get, + type=type, + name=member.name + ) + else: + body += ' {}_mapping::Map{}Array<{type}>(&wrapper->{name}, object_info_table{});\n'.format( + base_type, + map_type, + object_info_table_get, + type=type, + name=member.name + ) + else: + body += ' value->{name} = {}_mapping::Map{}<{type}>(wrapper->{name}, object_info_table{});\n'.format( + base_type, + map_type, + object_info_table_get, + type=type, + name=member.name + ) + + for member in generic_handle_members: + body += '\n' + body += ' value->{name} = {}_mapping::Map{}(wrapper->{name}, value->{}, object_info_table);\n'.format( + base_type, + map_type, + generic_handle_members[member], + name=member + ) + + return body + + def make_struct_handle_additions(self, name, members): + """Generating expressions for adding mappings for handles created at replay that are embedded in structs.""" + object_info_table_add = '' + platform_type = 'Dx12' + map_types = 'Objects' + map_type = 'Object' + base_type = 'object' + map_table = ', graphics::Dx12GpuVaMap* gpu_va_map' + + body = 'void AddStruct{}(format::HandleId parent_id, const Decoded_{name}* id_wrapper, const {name}* handle_struct, {}ObjectInfoTable* object_info_table{})\n'.format( + map_types, platform_type, map_table, name=name + ) + body += '{\n' + body += ' if (id_wrapper != nullptr)\n' + body += ' {\n' + + for member in members: + + if self.is_struct(member.base_type): + # This is a struct that includes handles. + if member.is_array: + body += ' AddStructArray{}(parent_id, id_wrapper->{name}->GetMetaStructPointer(), id_wrapper->{name}->GetLength(), handle_struct->{name}, static_cast(handle_struct->{length}), object_info_table);\n'.format( + map_types, + member.base_type, + name=member.name, + length=member.array_length + ) + elif member.is_pointer: + body += ' AddStructArray{}(parent_id, id_wrapper->{name}->GetMetaStructPointer(), 1, handle_struct->{name}, 1, object_info_table);\n'.format( + map_types, member.base_type, name=member.name + ) + else: + body += ' AddStruct{}(parent_id, id_wrapper->{name}, &handle_struct->{name}, object_info_table);\n'.format( + map_types, name=member.name + ) + else: + type = member.base_type + + # If it is an array or pointer, add with the utility function. 
+ if (member.is_array or member.pointer_count > 1): + if member.is_array: + body += ' {}_mapping::Add{}Array<{type}>(parent_id, id_wrapper->{name}.GetPointer(), id_wrapper->{name}.GetLength(), handle_struct->{name}, handle_struct->{length}, object_info_table{});\n'.format( + base_type, + map_type, + object_info_table_add, + type=type, + name=member.name, + length=member.array_length + ) + else: + body += ' {}_mapping::Add{}Array<{type}>(parent_id, id_wrapper->{name}.GetPointer(), 1, handle_struct->{name}, 1, object_info_table{}});\n'.format( + base_type, + map_type, + object_info_table_add, + type=type, + name=member.name + ) + else: + body += ' {}_mapping::Add{}<{type}>(parent_id, id_wrapper->{name}, handle_struct->{name}, object_info_table{});\n'.format( + base_type, + map_type, + object_info_table_add, + type=type, + name=member.name + ) + + body += ' }\n' + body += '}' + return body + + def make_struct_handle_allocations(self, name, members): + """Generate expressions to allocate memory for handles created at replay that are embedded in structs. + Determine if the struct only contains members that are structs that contain handles or static arrays of handles, + and does not need a temporary variable referencing the struct value. + """ + map_type = 'Object' + + needs_value_ptr = False + for member in members: + if self.is_handle( + member.base_type + ) and not (member.is_array and not member.is_dynamic): + needs_value_ptr = True + break + + body = 'void SetStruct{}Lengths(Decoded_{name}* wrapper)\n'.format( + map_type, name=name + ) + body += '{\n' + + if not needs_value_ptr: + body += ' if (wrapper != nullptr)\n' + body += ' {\n' + else: + body += ' if ((wrapper != nullptr) && (wrapper->decoded_value != nullptr))\n' + body += ' {\n' + body += ' {}* value = wrapper->decoded_value;\n'.format( + name + ) + body += '\n' + + for member in members: + if self.is_struct(member.base_type): + # This is a struct that includes handles. + if member.is_array: + body += ' SetStructArray{}Lengths(wrapper->{name}->GetMetaStructPointer(), wrapper->{name}->GetLength());\n'.format( + map_type, member.base_type, name=member.name + ) + elif member.is_pointer: + body += ' SetStructArray{}Lengths(wrapper->{name}->GetMetaStructPointer(), 1);\n'.format( + map_type, member.base_type, name=member.name + ) + else: + body += ' SetStruct{}Lengths(wrapper->{name});\n'.format( + map_type, name=member.name + ) + else: + # If it is an array or pointer, add with the utility function. + if (member.is_array or member.pointer_count > 1): + if member.is_array: + body += ' wrapper->{name}.Set{}Length(wrapper->{name}.GetLength());\n'.format( + map_type, name=member.name + ) + else: + body += ' wrapper->{}.Set{}Length(1);\n'.format( + map_type, member.name + ) + + if member.is_dynamic or member.is_pointer: + body += ' value->{name} = wrapper->{name}.GetHandlePointer();\n'.format( + name=member.name + ) + + body += ' }\n' + body += '}' + return body diff --git a/framework/generated/dx12_generators/dx12_struct_object_mappers_header_generator.py b/framework/generated/dx12_generators/dx12_struct_object_mappers_header_generator.py index d68edfa775..51fe83ad81 100644 --- a/framework/generated/dx12_generators/dx12_struct_object_mappers_header_generator.py +++ b/framework/generated/dx12_generators/dx12_struct_object_mappers_header_generator.py @@ -21,14 +21,10 @@ # IN THE SOFTWARE. 
import sys -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator -from base_struct_handle_mappers_header_generator import BaseStructHandleMappersHeaderGenerator +from dx12_base_generator import Dx12BaseGenerator, write -class Dx12StructObjectMappersHeaderGenerator( - BaseStructHandleMappersHeaderGenerator, Dx12BaseGenerator -): +class Dx12StructObjectMappersHeaderGenerator(Dx12BaseGenerator): """Generates C++ functions responsible for Dx12 struct object mappers.""" def __init__( @@ -80,7 +76,21 @@ def generate_feature(self): # Functions should not be generated for structs on the blacklist. self.check_blacklist = True - BaseStructHandleMappersHeaderGenerator.generate_feature(self) + + for struct in self.get_filtered_struct_names(): + if ( + (struct in self.structs_with_handles) + or (struct in self.GENERIC_HANDLE_STRUCTS) + or (struct in self.structs_with_map_data) + ) and (struct not in self.STRUCT_MAPPERS_BLACKLIST): + body = '\n' + body += 'void MapStructObjects(Decoded_{}* wrapper, '.format( + struct + ) + body += 'const Dx12ObjectInfoTable& object_info_table, ' + body += 'const graphics::Dx12GpuVaMap& gpu_va_map);' + write(body, file=self.outFile) + header_dict = self.source_dict['header_dict'] self.structs_with_objects = self.collect_struct_with_objects( header_dict @@ -90,7 +100,106 @@ def generate_feature(self): def endFile(self): """Method override.""" - BaseStructHandleMappersHeaderGenerator.endFile(self) + platform_type = 'Dx12' + map_types = 'Objects' + map_type = 'Object' + map_table = ', const graphics::Dx12GpuVaMap& gpu_va_map' + map_object = ', gpu_va_map' + + self.newline() + write('template ', file=self.outFile) + write( + 'void MapStructArray{}(T* structs, size_t len, const {}ObjectInfoTable& object_info_table{})' + .format(map_types, platform_type, map_table), + file=self.outFile + ) + write('{', file=self.outFile) + write(' if (structs != nullptr)', file=self.outFile) + write(' {', file=self.outFile) + write(' for (size_t i = 0; i < len; ++i)', file=self.outFile) + write(' {', file=self.outFile) + write( + ' MapStruct{}(&structs[i], object_info_table{});'. 
+ format(map_types, map_object), + file=self.outFile + ) + write(' }', file=self.outFile) + write(' }', file=self.outFile) + write('}', file=self.outFile) + self.newline() + + for struct in self.output_structs_with_handles: + write( + 'void AddStruct{}(format::HandleId parent_id, const Decoded_{type}* id_wrapper, const {type}* handle_struct, {}ObjectInfoTable* object_info_table{});' + .format(map_types, platform_type, map_table, type=struct), + file=self.outFile + ) + self.newline() + + write('template ', file=self.outFile) + write( + 'void AddStructArray{}(format::HandleId parent_id, const T* id_wrappers, size_t id_len, const typename T::struct_type* handle_structs, size_t handle_len, {}ObjectInfoTable* object_info_table{})' + .format(map_types, platform_type, map_table), + file=self.outFile + ) + write('{', file=self.outFile) + write( + ' if (id_wrappers != nullptr && handle_structs != nullptr)', + file=self.outFile + ) + write(' {', file=self.outFile) + write( + ' // TODO: Improved handling of array size mismatch.', + file=self.outFile + ) + write( + ' size_t len = std::min(id_len, handle_len);', + file=self.outFile + ) + write(' for (size_t i = 0; i < len; ++i)', file=self.outFile) + write(' {', file=self.outFile) + write( + ' AddStruct{}(parent_id, &id_wrappers[i], &handle_structs[i], object_info_table);' + .format(map_types), + file=self.outFile + ) + write(' }', file=self.outFile) + write(' }', file=self.outFile) + write('}', file=self.outFile) + self.newline() + + for struct in self.output_structs_with_handles: + if struct in self.structs_with_handle_ptrs: + write( + 'void SetStruct{map_type}Lengths(Decoded_{type}* wrapper);' + .format(map_type=map_type, type=struct), + file=self.outFile + ) + self.newline() + + write('template ', file=self.outFile) + write( + 'void SetStructArray{}Lengths(T* wrappers, size_t len)'. + format(map_type), + file=self.outFile + ) + write('{', file=self.outFile) + write(' if (wrappers != nullptr)', file=self.outFile) + write(' {', file=self.outFile) + write(' for (size_t i = 0; i < len; ++i)', file=self.outFile) + write(' {', file=self.outFile) + write( + ' SetStruct{}Lengths(&wrappers[i]);'.format(map_type), + file=self.outFile + ) + write(' }', file=self.outFile) + write(' }', file=self.outFile) + write('}', file=self.outFile) + self.newline() + + write('GFXRECON_END_NAMESPACE(decode)', file=self.outFile) + write('GFXRECON_END_NAMESPACE(gfxrecon)', file=self.outFile) + # Finish processing in superclass Dx12BaseGenerator.endFile(self) diff --git a/framework/generated/dx12_generators/dx12_struct_unwrappers_body_generator.py b/framework/generated/dx12_generators/dx12_struct_unwrappers_body_generator.py index 0e7f3a4653..59cf27c67b 100644 --- a/framework/generated/dx12_generators/dx12_struct_unwrappers_body_generator.py +++ b/framework/generated/dx12_generators/dx12_struct_unwrappers_body_generator.py @@ -21,8 +21,7 @@ # IN THE SOFTWARE. 
import sys -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator +from dx12_base_generator import Dx12BaseGenerator, write # Generates functions to unwrap struct members with a COM object type diff --git a/framework/generated/dx12_generators/dx12_struct_unwrappers_header_generator.py b/framework/generated/dx12_generators/dx12_struct_unwrappers_header_generator.py index e85dd276c9..66eaaf0815 100644 --- a/framework/generated/dx12_generators/dx12_struct_unwrappers_header_generator.py +++ b/framework/generated/dx12_generators/dx12_struct_unwrappers_header_generator.py @@ -21,8 +21,7 @@ # IN THE SOFTWARE. import sys -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator +from dx12_base_generator import Dx12BaseGenerator, write # Generates declarations for functions to unwrap struct members with a diff --git a/framework/generated/dx12_generators/dx12_struct_wrapper_body_generator.py b/framework/generated/dx12_generators/dx12_struct_wrapper_body_generator.py index ab0a9074cf..a68050f5b7 100644 --- a/framework/generated/dx12_generators/dx12_struct_wrapper_body_generator.py +++ b/framework/generated/dx12_generators/dx12_struct_wrapper_body_generator.py @@ -21,8 +21,7 @@ # IN THE SOFTWARE. import sys -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator +from dx12_base_generator import Dx12BaseGenerator, write class Dx12StructWrapperBodyGenerator(Dx12BaseGenerator): diff --git a/framework/generated/dx12_generators/dx12_struct_wrapper_header_generator.py b/framework/generated/dx12_generators/dx12_struct_wrapper_header_generator.py index 437e985a1e..2df7a6c214 100644 --- a/framework/generated/dx12_generators/dx12_struct_wrapper_header_generator.py +++ b/framework/generated/dx12_generators/dx12_struct_wrapper_header_generator.py @@ -20,8 +20,7 @@ # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. import sys -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator +from dx12_base_generator import Dx12BaseGenerator, write class Dx12StructWrapperHeaderGenerator(Dx12BaseGenerator): diff --git a/framework/generated/dx12_generators/dx12_wrapper_body_generator.py b/framework/generated/dx12_generators/dx12_wrapper_body_generator.py index 115ed8986c..84d883b2a8 100644 --- a/framework/generated/dx12_generators/dx12_wrapper_body_generator.py +++ b/framework/generated/dx12_generators/dx12_wrapper_body_generator.py @@ -23,8 +23,7 @@ import sys import json -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator, Dx12GeneratorOptions +from dx12_base_generator import Dx12BaseGenerator, Dx12GeneratorOptions, write class Dx12WrapperBodyGeneratorOptions(Dx12GeneratorOptions): diff --git a/framework/generated/dx12_generators/dx12_wrapper_creators_body_generator.py b/framework/generated/dx12_generators/dx12_wrapper_creators_body_generator.py index f7645e6339..53db3f7be1 100644 --- a/framework/generated/dx12_generators/dx12_wrapper_creators_body_generator.py +++ b/framework/generated/dx12_generators/dx12_wrapper_creators_body_generator.py @@ -21,8 +21,7 @@ # IN THE SOFTWARE. import sys -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator +from dx12_base_generator import Dx12BaseGenerator, write # Generates functions to create wrappers for DX12 capture based on IID. 
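Note on the struct object mapper generators changed earlier in this patch (dx12_struct_object_mappers_body_generator.py and dx12_struct_object_mappers_header_generator.py): they now build the Dx12 mapping code directly from the format strings shown above rather than inheriting it from the shared base classes. A minimal sketch of the kind of C++ those format strings emit follows; the struct name D3D12_EXAMPLE_DESC and its members are hypothetical, while the signature and the object_mapping calls mirror generate_feature and make_struct_handle_mappings:

    // Hypothetical generated output -- struct and member names are illustrative only.
    void MapStructObjects(Decoded_D3D12_EXAMPLE_DESC*   wrapper,
                          const Dx12ObjectInfoTable&    object_info_table,
                          const graphics::Dx12GpuVaMap& gpu_va_map)
    {
        if ((wrapper != nullptr) && (wrapper->decoded_value != nullptr))
        {
            D3D12_EXAMPLE_DESC* value = wrapper->decoded_value;

            // COM object member: translate the captured handle ID into the replay object.
            value->pResource = object_mapping::MapObject<ID3D12Resource>(wrapper->pResource, object_info_table);

            // Nested struct member that itself contains objects.
            MapStructObjects(wrapper->SubDesc, object_info_table, gpu_va_map);
        }
    }

The templated array helpers emitted by the header generator (MapStructArrayObjects, AddStructArrayObjects, SetStructArrayObjectLengths) simply loop over these per-struct functions, which is why only the per-struct bodies need to be generated for each type.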
diff --git a/framework/generated/dx12_generators/dx12_wrapper_creators_header_generator.py b/framework/generated/dx12_generators/dx12_wrapper_creators_header_generator.py index 1831da2aac..18f351b878 100644 --- a/framework/generated/dx12_generators/dx12_wrapper_creators_header_generator.py +++ b/framework/generated/dx12_generators/dx12_wrapper_creators_header_generator.py @@ -21,8 +21,7 @@ # IN THE SOFTWARE. import sys -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator +from dx12_base_generator import Dx12BaseGenerator, write # Generates declarations for functions to create wrappers for DX12 capture diff --git a/framework/generated/dx12_generators/dx12_wrapper_header_generator.py b/framework/generated/dx12_generators/dx12_wrapper_header_generator.py index 84de9961b0..c0717a0d1e 100644 --- a/framework/generated/dx12_generators/dx12_wrapper_header_generator.py +++ b/framework/generated/dx12_generators/dx12_wrapper_header_generator.py @@ -20,8 +20,7 @@ # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. import sys -from base_generator import write -from dx12_base_generator import Dx12BaseGenerator +from dx12_base_generator import Dx12BaseGenerator, write # Generates function/class wrappers for DX12 capture. diff --git a/framework/generated/dx12_generators/gencode.py b/framework/generated/dx12_generators/gencode.py index c7fdce324b..6bce8c05d4 100644 --- a/framework/generated/dx12_generators/gencode.py +++ b/framework/generated/dx12_generators/gencode.py @@ -24,8 +24,7 @@ import sys # API Call Encoders -from base_generator import write -from dx12_base_generator import Dx12GeneratorOptions +from dx12_base_generator import Dx12GeneratorOptions, write from dx12_api_call_encoders_header_generator import Dx12ApiCallEncodersHeaderGenerator from dx12_api_call_encoders_body_generator import Dx12ApiCallEncodersBodyGenerator from dx12_struct_decoders_forward_generator import Dx12StructDecodersForwardGenerator diff --git a/framework/generated/generate_dx12.py b/framework/generated/generate_dx12.py index dd33791055..46628f2d38 100644 --- a/framework/generated/generate_dx12.py +++ b/framework/generated/generate_dx12.py @@ -27,7 +27,6 @@ GENERATOR_PATH = './dx12_generators' LIB_CPPHEADERPARSER_PATH = '../../external' LIB_REGISTRY_PATH = '../../external/Vulkan-Headers/registry' -VULKAN_GENERATOR_PATH = './vulkan_generators' BASE_GENERATOR_PATH = './base_generators' SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__)) @@ -115,9 +114,6 @@ BASE_GENERATOR_DIR = os.path.normpath( os.path.join(CURRENT_DIR, BASE_GENERATOR_PATH) ) - VULKAN_GENERATOR_DIR = os.path.normpath( - os.path.join(CURRENT_DIR, VULKAN_GENERATOR_PATH) - ) LIB_REGISTRY_DIR = os.path.normpath( os.path.join(CURRENT_DIR, LIB_REGISTRY_PATH) ) @@ -127,7 +123,6 @@ sys.path.append(GENERATOR_DIR) sys.path.append(BASE_GENERATOR_DIR) - sys.path.append(VULKAN_GENERATOR_DIR) sys.path.append(LIB_REGISTRY_DIR) sys.path.append(LIB_CPPHEADERPARSER_DIR) diff --git a/framework/generated/vulkan_generators/base_generator.py b/framework/generated/vulkan_generators/base_generator.py index dc5fcf4567..8ccda5e735 100644 --- a/framework/generated/vulkan_generators/base_generator.py +++ b/framework/generated/vulkan_generators/base_generator.py @@ -1092,7 +1092,7 @@ def check_struct_member_handles( elif union_info.base_type in self.MAP_STRUCT_TYPE: if (structs_with_map_data is not None): map_data.append(value) - elif ('pNext' in value.name) and (not self.is_dx12_class()): + elif ('pNext' in 
value.name): # The pNext chain may include a struct with handles. has_pnext_handles, has_pnext_handle_ptrs = self.check_struct_pnext_handles( typename @@ -1258,10 +1258,7 @@ def make_decoded_param_type(self, value): count = value.pointer_count if self.is_struct(type_name): - if ( - self.is_dx12_class() and - (value.array_dimension and value.array_dimension == 1) - ) or (not self.is_dx12_class() and count > 1): + if count > 1: type_name = 'StructPointerDecoder'.format( type_name ) @@ -1286,14 +1283,8 @@ def make_decoded_param_type(self, value): type_name = 'StringDecoder' elif type_name == 'void': if value.is_array: - if (self.is_dx12_class() and count > 1): - # If this was a pointer to memory (void**) allocated internally by the implementation, it was encoded as - # an array of bytes but must be retrieved as a pointer to a memory allocation. For this case, the array - # length value defines the size of the memory referenced by the single retrieved pointer. - type_name = 'PointerDecoder' - else: - # If this was an array (void*) it was encoded as an array of bytes. - type_name = 'PointerDecoder' + # If this was an array (void*) it was encoded as an array of bytes. + type_name = 'PointerDecoder' elif count > 1: # If this was a pointer to a pointer to an unknown object (void**), it was encoded as a pointer to a 64-bit address value. # So, we specify uint64_t as the decode type and void* as the type to be used for Vulkan API call output parameters. @@ -1341,23 +1332,10 @@ def make_consumer_func_decl( param_decls.append(param_decl) if return_type != 'void': - if self.is_dx12_class(): - method_name = name[name.find('::Process_') + 10:] - return_value = self.get_return_value_info( - return_type, method_name - ) - rtn_type1 = self.make_decoded_param_type(return_value) - if rtn_type1.find('Decoder') != -1: - rtn_type1 += '*' - param_decl = self.make_aligned_param_decl( - rtn_type1, 'return_value', self.INDENT_SIZE, - self.genOpts.align_func_param - ) - else: - param_decl = self.make_aligned_param_decl( - return_type, 'returnValue', self.INDENT_SIZE, - self.genOpts.align_func_param - ) + param_decl = self.make_aligned_param_decl( + return_type, 'returnValue', self.INDENT_SIZE, + self.genOpts.align_func_param + ) param_decls.append(param_decl) for value in values: @@ -1542,15 +1520,10 @@ def make_encoder_method_call( handle_type_name += self.get_generic_cmd_handle_type_value( name, value.name ) - if self.is_dx12_class(): - arg_name = 'GetDx12WrappedId({}, {})'.format( - arg_name, handle_type_name - ) - else: - wrapper = self.get_wrapper_prefix_from_type() - arg_name = '{}::GetWrappedId({}, {})'.format( - wrapper, arg_name, handle_type_name - ) + wrapper = self.get_wrapper_prefix_from_type() + arg_name = '{}::GetWrappedId({}, {})'.format( + wrapper, arg_name, handle_type_name + ) args = [arg_name] @@ -1615,9 +1588,6 @@ def make_encoder_method_call( return '{}({})'.format(method_call, ', '.join(args)) - def is_dx12_class(self): - return True if ('Dx12' in self.__class__.__name__) else False - def is_openxr_class(self): return True if ('OpenXr' in self.__class__.__name__) else False diff --git a/framework/generated/vulkan_generators/vulkan_cpp_consumer_body_generator.py b/framework/generated/vulkan_generators/vulkan_cpp_consumer_body_generator.py index cc8b021919..0a970934c9 100644 --- a/framework/generated/vulkan_generators/vulkan_cpp_consumer_body_generator.py +++ b/framework/generated/vulkan_generators/vulkan_cpp_consumer_body_generator.py @@ -648,8 +648,6 @@ def need_feature_generation(self): def 
generate_feature(self): """Performs C++ code generation for the feature.""" platform_type = 'Vulkan' - if self.is_dx12_class(): - platform_type = 'Dx12' first = True cmdnames = self.get_filtered_cmd_names() From 2902a310bb65272448a4ebd513c6cb94accb93be Mon Sep 17 00:00:00 2001 From: Rodrigo Urra Date: Mon, 28 Oct 2024 15:45:10 -0400 Subject: [PATCH 45/70] Have AGS device creation honor desired adapter (#1833) --- framework/decode/custom_ags_replay_consumer.cpp | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/framework/decode/custom_ags_replay_consumer.cpp b/framework/decode/custom_ags_replay_consumer.cpp index 0142a11810..f4d04f6924 100644 --- a/framework/decode/custom_ags_replay_consumer.cpp +++ b/framework/decode/custom_ags_replay_consumer.cpp @@ -180,14 +180,12 @@ void AgsReplayConsumer::Process_agsDriverExtensionsDX12_CreateDevice( if (ValidateAgsInputs(context)) { - IDXGIAdapter* current_adapter = nullptr; - - current_adapter = dx12_replay_consumer_->MapObject( - reinterpret_cast(creationParams->pAdapter)); + IDXGIAdapter* current_adapter = dx12_replay_consumer_->GetAdapter(); if (current_adapter == nullptr) { - current_adapter = dx12_replay_consumer_->GetAdapter(); + current_adapter = dx12_replay_consumer_->MapObject( + reinterpret_cast(creationParams->pAdapter)); } creationParams->pAdapter = current_adapter; From 82d0f99addb2adb1e0d321b42d1b91f29a9c203e Mon Sep 17 00:00:00 2001 From: Rodrigo Urra Date: Mon, 28 Oct 2024 15:48:09 -0400 Subject: [PATCH 46/70] Have info tool expose SubSysId during GPU enumeration (#1834) --- tools/info/main.cpp | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/tools/info/main.cpp b/tools/info/main.cpp index 5f9667c545..e085cfda92 100644 --- a/tools/info/main.cpp +++ b/tools/info/main.cpp @@ -1,6 +1,6 @@ /* ** Copyright (c) 2020-2024 LunarG, Inc. -** Copyright (c) 2022-2023 Advanced Micro Devices, Inc. All rights reserved. +** Copyright (c) 2022-2024 Advanced Micro Devices, Inc. All rights reserved. 
** ** Permission is hereby granted, free of charge, to any person obtaining a ** copy of this software and associated documentation files (the "Software"), @@ -694,7 +694,7 @@ static bool CheckOptionEnumGpuIndices(const char* exe_name, const gfxrecon::util gfxrecon::graphics::dx12::ActiveAdapterMap adapters{}; gfxrecon::graphics::dx12::TrackAdapters(result, reinterpret_cast(&factory1), adapters); - GFXRECON_WRITE_CONSOLE("GPU index\tGPU name"); + GFXRECON_WRITE_CONSOLE("GPU index\tGPU name\tSubSys ID"); for (size_t index = 0; index < adapters.size(); ++index) { for (auto adapter : adapters) @@ -704,7 +704,10 @@ static bool CheckOptionEnumGpuIndices(const char* exe_name, const gfxrecon::util std::string replay_adapter_str = gfxrecon::util::WCharArrayToString(adapter.second.internal_desc.Description); - GFXRECON_WRITE_CONSOLE("%-9x\t%s", adapter.second.adapter_idx, replay_adapter_str.c_str()); + GFXRECON_WRITE_CONSOLE("%-9x\t%s\t%u", + adapter.second.adapter_idx, + replay_adapter_str.c_str(), + adapter.second.internal_desc.SubSysId); adapter.second.adapter->Release(); break; } From 0aef135c055b7b9bbb9675f92454153b260a59ee Mon Sep 17 00:00:00 2001 From: davidd-lunarg <73848817+davidd-lunarg@users.noreply.github.com> Date: Tue, 8 Oct 2024 14:19:30 -0600 Subject: [PATCH 47/70] Add DirectX Shader Compiler dependency --- CMakeLists.txt | 9 +++ LICENSE_ThirdParty.txt | 70 +++++++++++++++++++++ cmake/FindAGS.cmake | 2 +- cmake/FindDXC.cmake | 72 ++++++++++++++++++++++ cmake/InstallDXC.cmake | 19 ++++++ framework/decode/CMakeLists.txt | 8 +-- framework/encode/CMakeLists.txt | 2 +- framework/format/CMakeLists.txt | 2 +- framework/graphics/CMakeLists.txt | 4 +- framework/graphics/dx12_image_renderer.cpp | 1 - framework/util/CMakeLists.txt | 3 +- layer/d3d12/CMakeLists.txt | 2 +- layer/d3d12_capture/CMakeLists.txt | 2 +- tools/optimize/CMakeLists.txt | 4 +- tools/replay/CMakeLists.txt | 4 +- 15 files changed, 188 insertions(+), 16 deletions(-) create mode 100644 cmake/FindDXC.cmake create mode 100644 cmake/InstallDXC.cmake diff --git a/CMakeLists.txt b/CMakeLists.txt index 74b737c41c..a9f3a40814 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -171,6 +171,7 @@ if(MSVC) add_definitions(-D_DISABLE_CONSTEXPR_MUTEX_CONSTRUCTOR) if(${D3D12_SUPPORT}) + set(D3D12_INCLUDE_DIRECTORIES "${CMAKE_SOURCE_DIR}/external/AgilitySDK/inc") add_definitions(-DD3D12_SUPPORT) # Check Windows SDK version and print warning if there is a mismatch. @@ -182,6 +183,12 @@ if(MSVC) "set D3D12_SUPPORT=OFF or configure the build with the recommended Windows SDK version. See BUILD.md " "for more information.") endif() + + find_package(DXC) + if (DXC_FOUND) + set(D3D12_INCLUDE_DIRECTORIES ${D3D12_INCLUDE_DIRECTORIES} ${DXC_INCLUDE_DIR}) + add_definitions(-DGFXRECON_DXC_SUPPORT) + endif() else() set(BUILD_LAUNCHER_AND_INTERCEPTOR OFF) @@ -202,6 +209,8 @@ if(MSVC) if (AGS_FOUND) add_definitions(-DGFXRECON_AGS_SUPPORT) + set(D3D12_INCLUDE_DIRECTORIES ${D3D12_INCLUDE_DIRECTORIES} ${AGS_INCLUDE_DIR}) + # The value for option GFXRECON_AGS_SUPPORT gets cached so use a non-cached variable # to determine the final result. set(GFXRECON_AGS_SUPPORT_FINAL ON) diff --git a/LICENSE_ThirdParty.txt b/LICENSE_ThirdParty.txt index 4adbfb093b..520ee8cf8e 100644 --- a/LICENSE_ThirdParty.txt +++ b/LICENSE_ThirdParty.txt @@ -408,3 +408,73 @@ EFFET JURIDIQUE. Le présent contrat décrit certains droits juridiques. Vous pourriez avoir d’autres droits prévus par les lois de votre pays. 
Le présent contrat ne modifie pas les droits que vous confèrent les lois de votre pays si celles-ci ne le permettent pas. + +## DirectX Shader Compiler - d3d12shader.h + +Copyright (c) Microsoft Corporation. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +## DirectX Shader Compiler - all other files + +============================================================================== +LLVM Release License +============================================================================== +University of Illinois/NCSA +Open Source License + +Copyright (c) 2003-2015 University of Illinois at Urbana-Champaign. +All rights reserved. + +Developed by: + + LLVM Team + + University of Illinois at Urbana-Champaign + + http://llvm.org + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal with +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimers. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimers in the + documentation and/or other materials provided with the distribution. + + * Neither the names of the LLVM Team, University of Illinois at + Urbana-Champaign, nor the names of its contributors may be used to + endorse or promote products derived from this Software without specific + prior written permission. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE +SOFTWARE. 
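With the new FindDXC.cmake module and the top-level CMakeLists change above, a successful DXC lookup defines GFXRECON_DXC_SUPPORT and adds the DXC headers to D3D12_INCLUDE_DIRECTORIES. C++ code that wants the compiler API is then expected to guard it on that define, roughly as in this sketch (the helper name is made up; the include and the DxcCreateInstance call are the standard DXC SDK entry points also used later in this series):

    #if defined(GFXRECON_DXC_SUPPORT)
    #include <dxcapi.h>
    #endif

    // Made-up helper showing the conditional-compilation pattern.
    bool CanUseDxcReflection()
    {
    #if defined(GFXRECON_DXC_SUPPORT)
        IDxcUtils* utils = nullptr;
        if (SUCCEEDED(DxcCreateInstance(CLSID_DxcUtils, IID_PPV_ARGS(&utils))))
        {
            utils->Release();
            return true;
        }
        return false;
    #else
        // Built without DXC support; callers fall back and log a warning instead.
        return false;
    #endif
    }

Linking is handled by adding ${DXC_LIBRARY_PATH} to the consuming targets, and InstallDXC.cmake copies dxcompiler.dll next to the executables, as the tools/optimize and tools/replay changes later in this patch do.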
diff --git a/cmake/FindAGS.cmake b/cmake/FindAGS.cmake index c6b96794f7..99d496216a 100644 --- a/cmake/FindAGS.cmake +++ b/cmake/FindAGS.cmake @@ -21,7 +21,7 @@ function(FindAgsLibrary AGS_SEARCH_PATH AGS_LIBRARY_NAME AGS_LIBRARY_VAR) unset(${AGS_LIBRARY_VAR} CACHE) endif() endif() - find_library(${AGS_LIBRARY_VAR} NAMES ${AGS_LIBRARY_NAME} PATHS ${AGS_SEARCH_PATH} PATH_SUFFIXES lib) + find_library(${AGS_LIBRARY_VAR} NAMES ${AGS_LIBRARY_NAME} PATHS ${AGS_SEARCH_PATH} PATH_SUFFIXES lib NO_DEFAULT_PATH) endfunction() # Find the build architecture. diff --git a/cmake/FindDXC.cmake b/cmake/FindDXC.cmake new file mode 100644 index 0000000000..e893d49c1b --- /dev/null +++ b/cmake/FindDXC.cmake @@ -0,0 +1,72 @@ +# FindDXC +# ----------- +# +# Find the DXC includes and library. +# +# This module defines the following variables: +# +# DXC_FOUND : True if DXC was found. +# DXC_INCLUDE_DIR : The location of the DXC header file. +# DXC_LIBRARY_PATH : Location of the DXC library. +# DXC_DLL_PATH : Location of the DXC DLL. + +if (${D3D12_SUPPORT}) + + # Find the build architecture. + set(DXC_ARCH "") + if(CMAKE_GENERATOR_PLATFORM STREQUAL "ARM64") + set(DXC_ARCH "arm64") + elseif(CMAKE_SIZEOF_VOID_P EQUAL 8) + set(DXC_ARCH "x64") + elseif(CMAKE_SIZEOF_VOID_P EQUAL 4) + set(DXC_ARCH "x86") + endif() + + if(DXC_ARCH) + + set(DXC_SDK_DIR "${CMAKE_BINARY_DIR}/external/DXC") + set(DXC_SDK_URL "https://github.com/microsoft/DirectXShaderCompiler/releases/download/v1.8.2407/dxc_2024_07_31.zip") + + # Suppress warning on newer versions of CMake related to FetchContent file timestamp behavior. + if (${CMAKE_VERSION} VERSION_GREATER "3.24") + cmake_policy(SET CMP0135 NEW) + endif() + + message(STATUS "Fetching DXC files from ${DXC_SDK_URL}") + include(FetchContent) + FetchContent_Declare( + DXC_SDK + URL ${DXC_SDK_URL} + SOURCE_DIR ${DXC_SDK_DIR} + ) + FetchContent_MakeAvailable(DXC_SDK) + + if(DEFINED CACHE{DXC_INCLUDE_DIR} AND NOT EXISTS "${DXC_INCLUDE_DIR}/dxcapi.h") + message("Current DXC_INCLUDE_DIR '${DXC_INCLUDE_DIR}' does not contain 'dxcapi.h'. Resetting DXC_INCLUDE_DIR.") + unset(DXC_INCLUDE_DIR CACHE) + endif() + find_path(DXC_INCLUDE_DIR NAME "dxcapi.h" PATHS "${DXC_SDK_DIR}/inc") + mark_as_advanced(DXC_INCLUDE_DIR) + + # If the cached library path doesn't exist or arch doesn't match, unset variable. + if(DEFINED CACHE{DXC_LIBRARY_PATH} AND NOT EXISTS "${DXC_LIBRARY_PATH}") + message("Current DXC_LIBRARY_PATH '${DXC_LIBRARY_PATH}' does not exist. Resetting DXC_LIBRARY_PATH.") + unset(DXC_LIBRARY_PATH CACHE) + endif() + find_library(DXC_LIBRARY_PATH NAME "dxcompiler.lib" PATHS "${DXC_SDK_DIR}/lib/${DXC_ARCH}" NO_DEFAULT_PATH) + mark_as_advanced(DXC_LIBRARY_PATH) + + # If the cached DLL path doesn't exist or arch doesn't match, unset variable. + if(DEFINED CACHE{DXC_DLL_PATH} AND NOT EXISTS "${DXC_DLL_PATH}") + message("Current DXC_DLL_PATH '${DXC_DLL_PATH}' does not exist. 
Resetting DXC_DLL_PATH.") + unset(DXC_DLL_PATH CACHE) + endif() + find_file(DXC_DLL_PATH NAME "dxcompiler.dll" PATHS "${DXC_SDK_DIR}/bin/${DXC_ARCH}" NO_DEFAULT_PATH) + mark_as_advanced(DXC_DLL_PATH) + + endif() + + include(FindPackageHandleStandardArgs) + find_package_handle_standard_args(DXC REQUIRED_VARS DXC_LIBRARY_PATH DXC_DLL_PATH DXC_INCLUDE_DIR) + +endif() # D3D12_SUPPORT diff --git a/cmake/InstallDXC.cmake b/cmake/InstallDXC.cmake new file mode 100644 index 0000000000..a0de9c315c --- /dev/null +++ b/cmake/InstallDXC.cmake @@ -0,0 +1,19 @@ +if (${D3D12_SUPPORT} AND "${DXC_FOUND}") + + # Setup dst folders for each build config + set(DXC_DST_DBG ${CMAKE_CURRENT_BINARY_DIR}/Debug) + set(DXC_DST_REL ${CMAKE_CURRENT_BINARY_DIR}/Release) + set(DXC_DST_REL_WITH_DBG_INFO ${CMAKE_CURRENT_BINARY_DIR}/RelWithDebInfo) + set(DXC_DST_MIN_SIZE_REL ${CMAKE_CURRENT_BINARY_DIR}/MinSizeRel) + set(DXC_DST_INSTALL ${CMAKE_INSTALL_BINDIR}) + + # Copy dxcompiler.dll to dst build folder + file(COPY ${DXC_DLL_PATH} DESTINATION ${DXC_DST_DBG}) + file(COPY ${DXC_DLL_PATH} DESTINATION ${DXC_DST_REL}) + file(COPY ${DXC_DLL_PATH} DESTINATION ${DXC_DST_REL_WITH_DBG_INFO}) + file(COPY ${DXC_DLL_PATH} DESTINATION ${DXC_DST_MIN_SIZE_REL}) + + # Copy dxcompiler.dll to install folder + install(FILES ${DXC_DLL_PATH} DESTINATION ${DXC_DST_INSTALL}) + +endif() diff --git a/framework/decode/CMakeLists.txt b/framework/decode/CMakeLists.txt index 86e2b982f5..29a910fc37 100644 --- a/framework/decode/CMakeLists.txt +++ b/framework/decode/CMakeLists.txt @@ -286,7 +286,7 @@ if (WIN32) ${CMAKE_SOURCE_DIR}/framework ${CMAKE_SOURCE_DIR}/external ${CMAKE_SOURCE_DIR}/external/precompiled/win64/include/ - $<$:${CMAKE_SOURCE_DIR}/external/AgilitySDK/inc>) + $<$:${D3D12_INCLUDE_DIRECTORIES}>) else() target_include_directories(gfxrecon_decode PUBLIC @@ -294,7 +294,7 @@ if (WIN32) ${CMAKE_SOURCE_DIR}/framework ${CMAKE_SOURCE_DIR}/external ${CMAKE_SOURCE_DIR}/external/precompiled/win32/include/ - $<$:${CMAKE_SOURCE_DIR}/external/AgilitySDK/inc>) + $<$:${D3D12_INCLUDE_DIRECTORIES}>) endif() elseif(APPLE) target_include_directories(gfxrecon_decode @@ -305,7 +305,7 @@ elseif(APPLE) ${CMAKE_SOURCE_DIR}/framework ${CMAKE_SOURCE_DIR}/external ${CMAKE_SOURCE_DIR}/external/precompiled/macos/include/ - $<$:${CMAKE_SOURCE_DIR}/external/AgilitySDK/inc>) + $<$:${D3D12_INCLUDE_DIRECTORIES}>) else() target_include_directories(gfxrecon_decode PRIVATE @@ -315,7 +315,7 @@ else() ${CMAKE_SOURCE_DIR}/framework ${CMAKE_SOURCE_DIR}/external ${CMAKE_SOURCE_DIR}/external/precompiled/linux/include/ - $<$:${CMAKE_SOURCE_DIR}/external/AgilitySDK/inc>) + $<$:${D3D12_INCLUDE_DIRECTORIES}>) endif() if (CMAKE_SIZEOF_VOID_P EQUAL 8) diff --git a/framework/encode/CMakeLists.txt b/framework/encode/CMakeLists.txt index c615b545e8..e540117155 100644 --- a/framework/encode/CMakeLists.txt +++ b/framework/encode/CMakeLists.txt @@ -143,7 +143,7 @@ target_include_directories(gfxrecon_encode PUBLIC ${CMAKE_BINARY_DIR} ${CMAKE_SOURCE_DIR}/framework - $<$:${CMAKE_SOURCE_DIR}/external/AgilitySDK/inc>) + $<$:${D3D12_INCLUDE_DIRECTORIES}>) target_link_libraries(gfxrecon_encode gfxrecon_graphics gfxrecon_format gfxrecon_util vulkan_registry platform_specific) diff --git a/framework/format/CMakeLists.txt b/framework/format/CMakeLists.txt index 995513386f..4a58e7badc 100644 --- a/framework/format/CMakeLists.txt +++ b/framework/format/CMakeLists.txt @@ -42,7 +42,7 @@ target_sources(gfxrecon_format target_include_directories(gfxrecon_format PUBLIC ${CMAKE_SOURCE_DIR}/framework - 
$<$:${CMAKE_SOURCE_DIR}/external/AgilitySDK/inc>) + $<$:${D3D12_INCLUDE_DIRECTORIES}>) target_link_libraries(gfxrecon_format gfxrecon_util vulkan_registry platform_specific) diff --git a/framework/graphics/CMakeLists.txt b/framework/graphics/CMakeLists.txt index 46ee223be3..f69ee7c3ca 100644 --- a/framework/graphics/CMakeLists.txt +++ b/framework/graphics/CMakeLists.txt @@ -62,9 +62,9 @@ target_include_directories(gfxrecon_graphics ${CMAKE_SOURCE_DIR}/build ${CMAKE_SOURCE_DIR}/framework ${CMAKE_SOURCE_DIR}/external - $<$:${CMAKE_SOURCE_DIR}/external/AgilitySDK/inc>) + $<$:${D3D12_INCLUDE_DIRECTORIES}>) -target_link_libraries(gfxrecon_graphics gfxrecon_util vulkan_registry platform_specific $<$:D3DCompiler.lib>) +target_link_libraries(gfxrecon_graphics gfxrecon_util vulkan_registry platform_specific) common_build_directives(gfxrecon_graphics) diff --git a/framework/graphics/dx12_image_renderer.cpp b/framework/graphics/dx12_image_renderer.cpp index db4713b755..900eb4fb30 100644 --- a/framework/graphics/dx12_image_renderer.cpp +++ b/framework/graphics/dx12_image_renderer.cpp @@ -22,7 +22,6 @@ #include "graphics/dx12_image_renderer.h" -#include #include GFXRECON_BEGIN_NAMESPACE(gfxrecon) diff --git a/framework/util/CMakeLists.txt b/framework/util/CMakeLists.txt index 945827d794..0137b22089 100644 --- a/framework/util/CMakeLists.txt +++ b/framework/util/CMakeLists.txt @@ -121,7 +121,7 @@ target_sources(gfxrecon_util target_include_directories(gfxrecon_util PUBLIC ${CMAKE_SOURCE_DIR}/framework - $<$:${CMAKE_SOURCE_DIR}/external/AgilitySDK/inc>) + $<$:${D3D12_INCLUDE_DIRECTORIES}>) target_link_libraries(gfxrecon_util platform_specific vulkan_registry spirv_registry spirv-reflect-static nlohmann_json::nlohmann_json ${CMAKE_DL_LIBS}) target_link_libraries(gfxrecon_util $<$:version.lib>) @@ -167,7 +167,6 @@ if (WAYLAND_LIBRARY) endif() if (${GFXRECON_AGS_SUPPORT_FINAL}) - target_include_directories(gfxrecon_util PUBLIC ${AGS_INCLUDE_DIR}) target_link_libraries(gfxrecon_util optimized ${AGS_LIBRARY_RELEASE}) target_link_libraries(gfxrecon_util debug ${AGS_LIBRARY_DEBUG}) endif() diff --git a/layer/d3d12/CMakeLists.txt b/layer/d3d12/CMakeLists.txt index 771930bff0..c15967d6e2 100644 --- a/layer/d3d12/CMakeLists.txt +++ b/layer/d3d12/CMakeLists.txt @@ -39,7 +39,7 @@ target_include_directories(d3d12 PUBLIC ${CMAKE_BINARY_DIR} ${CMAKE_SOURCE_DIR} - $<$:${CMAKE_SOURCE_DIR}/external/AgilitySDK/inc>) + $<$:${D3D12_INCLUDE_DIRECTORIES}>) target_link_libraries(d3d12 gfxrecon_encode gfxrecon_util ${LINK_D3D12_OUTPUT}) common_build_directives(d3d12) diff --git a/layer/d3d12_capture/CMakeLists.txt b/layer/d3d12_capture/CMakeLists.txt index d834d3f989..6ee8819a98 100644 --- a/layer/d3d12_capture/CMakeLists.txt +++ b/layer/d3d12_capture/CMakeLists.txt @@ -36,7 +36,7 @@ target_include_directories(d3d12_capture PUBLIC ${CMAKE_BINARY_DIR} ${CMAKE_SOURCE_DIR} - $<$:${CMAKE_SOURCE_DIR}/external/AgilitySDK/inc>) + $<$:${D3D12_INCLUDE_DIRECTORIES}>) target_link_libraries(d3d12_capture gfxrecon_encode gfxrecon_format gfxrecon_util platform_specific) common_build_directives(d3d12_capture) diff --git a/tools/optimize/CMakeLists.txt b/tools/optimize/CMakeLists.txt index b877666e9e..0d1e53f09c 100644 --- a/tools/optimize/CMakeLists.txt +++ b/tools/optimize/CMakeLists.txt @@ -65,10 +65,12 @@ target_link_libraries(gfxrecon-optimize gfxrecon_util platform_specific $<$:d3d12.lib> - $<$:dxgi.lib>) + $<$:dxgi.lib> + $<$:${DXC_LIBRARY_PATH}>) common_build_directives(gfxrecon-optimize) 
include(${CMAKE_SOURCE_DIR}/cmake/AgilitySDK.cmake) +include(${CMAKE_SOURCE_DIR}/cmake/InstallDXC.cmake) install(TARGETS gfxrecon-optimize RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}) diff --git a/tools/replay/CMakeLists.txt b/tools/replay/CMakeLists.txt index 0ea8b924b6..01430b1c59 100644 --- a/tools/replay/CMakeLists.txt +++ b/tools/replay/CMakeLists.txt @@ -48,7 +48,8 @@ target_link_libraries(gfxrecon-replay gfxrecon_util platform_specific $<$:d3d12.lib> - $<$:dxgi.lib>) + $<$:dxgi.lib> + $<$:${DXC_LIBRARY_PATH}>) if (MSVC) # Force inclusion of "gfxrecon_disable_popup_result" variable in linking. @@ -70,5 +71,6 @@ target_link_libraries(gfxrecon-replay Threads::Threads) common_build_directives(gfxrecon-replay) include(${CMAKE_SOURCE_DIR}/cmake/AgilitySDK.cmake) +include(${CMAKE_SOURCE_DIR}/cmake/InstallDXC.cmake) install(TARGETS gfxrecon-replay RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}) From 76aa9cf7e0ae2d2909931497ad277c1429a5adc0 Mon Sep 17 00:00:00 2001 From: davidd-lunarg <73848817+davidd-lunarg@users.noreply.github.com> Date: Wed, 28 Aug 2024 13:34:50 -0700 Subject: [PATCH 48/70] Use reflection to get DXIL library shader exports --- .../decode/dx12_resource_value_mapper.cpp | 118 +++++++++++++++++- framework/graphics/dx12_util.h | 14 +++ 2 files changed, 129 insertions(+), 3 deletions(-) diff --git a/framework/decode/dx12_resource_value_mapper.cpp b/framework/decode/dx12_resource_value_mapper.cpp index 71088a59ac..52964b97cb 100644 --- a/framework/decode/dx12_resource_value_mapper.cpp +++ b/framework/decode/dx12_resource_value_mapper.cpp @@ -1,5 +1,5 @@ /* -** Copyright (c) 2022 LunarG, Inc. +** Copyright (c) 2022-2024 LunarG, Inc. ** Copyright (c) 2022-2023 Advanced Micro Devices, Inc. All rights reserved. ** ** Permission is hereby granted, free of charge, to any person obtaining a @@ -27,6 +27,13 @@ #include "decode/dx12_experimental_resource_value_tracker.h" #include "decode/dx12_object_mapping_util.h" +#if defined(GFXRECON_DXC_SUPPORT) +#include +#include +#endif + +#include + GFXRECON_BEGIN_NAMESPACE(gfxrecon) GFXRECON_BEGIN_NAMESPACE(decode) @@ -141,6 +148,30 @@ void CopyMappedResourceValuesFromSrcToDst(std::map& } } +std::string DemangleDxilExportName(const std::string& mangled_name) +{ + size_t demangled_name_start = mangled_name.find_first_of("?"); + size_t demangled_name_end = mangled_name.find_first_of("@"); + + std::string demangled_name = ""; + if (demangled_name_start != std::string::npos && demangled_name_end != std::string::npos) + { + // The char after '?' is the first char of the unmangled name so increment start pos. + ++demangled_name_start; + demangled_name = mangled_name.substr(demangled_name_start, demangled_name_end - demangled_name_start); + } + + if (!demangled_name.empty()) + { + GFXRECON_LOG_DEBUG("Found demangled DXIL export name '%s'.", demangled_name.c_str()); + } + else + { + GFXRECON_LOG_WARNING("Failed to demangle DXIL export name '%s'.", mangled_name.c_str()); + } + return demangled_name; +} + } // namespace Dx12ResourceValueMapper::Dx12ResourceValueMapper(std::function get_object_info_func, @@ -1690,7 +1721,8 @@ void Dx12ResourceValueMapper::GetStateObjectLrsAssociationInfo( } else if (subobject_type == D3D12_STATE_SUBOBJECT_TYPE_DXIL_LIBRARY) { - // TODO: Parse local root signatures and their shader associations from the DXIL library. + // TODO: DXIL libraries can also contain local root signature definitions as well as subobject associations. + // That information should be parsed here as well to ensure correct LRS handling. 
GFXRECON_LOG_DEBUG_ONCE("A state object is being created with a DXIL library subobject. Some usages of " "DXIL library subobjects may not be fully supported by GFXR replay."); @@ -1699,10 +1731,90 @@ void Dx12ResourceValueMapper::GetStateObjectLrsAssociationInfo( auto num_exports = dxil_lib_desc_decoder->GetPointer()->NumExports; if (num_exports == 0) { - // TODO: Parse the names of all shaders exported from the DXIL library. +#if defined(GFXRECON_DXC_SUPPORT) + // If D3D12_DXIL_LIBRARY_DESC::NumExports == 0, everything in the DXIL library is exported. Use + // reflection to get the list of exported shader names. + HRESULT hr; + graphics::dx12::IDxcUtilsComPtr dxc_utils = nullptr; + hr = DxcCreateInstance(CLSID_DxcUtils, IID_PPV_ARGS(&dxc_utils)); + if (SUCCEEDED(hr)) + { + graphics::dx12::IDxcContainerReflectionComPtr dxc_container_reflection = nullptr; + DxcCreateInstance(CLSID_DxcContainerReflection, IID_PPV_ARGS(&dxc_container_reflection)); + if (SUCCEEDED(hr)) + { + // Create a DXC blob from the DXIL library's bytes. + graphics::dx12::IDxcBlobEncodingComPtr dxc_blob_encoding; + hr = dxc_utils->CreateBlobFromPinned( + dxil_lib_desc_decoder->GetPointer()->DXILLibrary.pShaderBytecode, + dxil_lib_desc_decoder->GetPointer()->DXILLibrary.BytecodeLength, + DXC_CP_ACP, + &dxc_blob_encoding); + if (SUCCEEDED(hr)) + { + // Load the DXIL library blob into the container reflection object. + hr = dxc_container_reflection->Load(dxc_blob_encoding); + if (SUCCEEDED(hr)) + { + // Get the DXIL library reflection object. + UINT32 dxil_part; + hr = dxc_container_reflection->FindFirstPartKind(DXC_PART_DXIL, &dxil_part); + if (SUCCEEDED(hr)) + { + graphics::dx12::ID3D12LibraryReflectionComPtr library_reflection; + hr = dxc_container_reflection->GetPartReflection(dxil_part, + IID_PPV_ARGS(&library_reflection)); + if (SUCCEEDED(hr)) + { + // Parse all exported function/shader names from the DXIL library. + D3D12_LIBRARY_DESC library_desc; + hr = library_reflection->GetDesc(&library_desc); + if (SUCCEEDED(hr)) + { + for (UINT i = 0; i < library_desc.FunctionCount; i++) + { + // The pointer returned by GetFunctionByIndex is owned by the + // ID3D12LibraryReflection object and does not need to be memory managed + // in this scope. + ID3D12FunctionReflection* function_reflection = + library_reflection->GetFunctionByIndex(i); + + D3D12_FUNCTION_DESC function_desc; + hr = function_reflection->GetDesc(&function_desc); + if (SUCCEEDED(hr)) + { + // Export names are mangled so parse the unmangled name. + auto function_name = DemangleDxilExportName(function_desc.Name); + if (!function_name.empty()) + { + std::wstring_convert> + converter; + export_names.insert(converter.from_bytes(function_name)); + } + } + } + } + } + } + } + } + } + } + if (FAILED(hr)) + { + GFXRECON_LOG_WARNING("Failed to parse shader exports from the DXIL subobject. Shader ID to LRS " + "associations may be incorrect."); + } +#else // GFXRECON_DXC_SUPPORT + GFXRECON_LOG_WARNING_ONCE( + "GFXReconstruct was built without DirectX Shader Compiler support and cannot parse the exports " + "from DXIL_LIBRARY subobjects. This may lead to incorrect DXR behavior. To fix this, be sure that " + "the DXC depedency is successfully found during CMake project configuration."); +#endif // GFXRECON_DXC_SUPPORT } else { + // Get the shader names specified explicitly in the D3D12_DXIL_LIBRARY_DESC. 
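// Illustrative aside, not part of this change: the demangling rule relied on by
// DemangleDxilExportName only extracts the substring between the leading '?' and
// the first '@' of an MSVC-style mangled export. A minimal standalone sketch,
// using a hypothetical export name and helper name:

#include <cassert>
#include <string>

std::string DemangleForIllustration(const std::string& mangled)
{
    const size_t start = mangled.find_first_of('?');
    const size_t end   = mangled.find_first_of('@');
    if ((start == std::string::npos) || (end == std::string::npos))
    {
        return "";
    }
    // Skip the '?' itself; everything up to the first '@' is the plain name.
    return mangled.substr(start + 1, end - (start + 1));
}

int main()
{
    assert(DemangleForIllustration("?MyRaygenShader@@YAXXZ") == "MyRaygenShader");
    return 0;
}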
for (UINT j = 0; j < num_exports; ++j) { export_names.insert(dxil_lib_desc_decoder->GetMetaStructPointer() diff --git a/framework/graphics/dx12_util.h b/framework/graphics/dx12_util.h index 21dc695400..91f89bdc9a 100644 --- a/framework/graphics/dx12_util.h +++ b/framework/graphics/dx12_util.h @@ -39,6 +39,11 @@ #include #include +#if defined(GFXRECON_DXC_SUPPORT) +#include +#include +#endif + GFXRECON_BEGIN_NAMESPACE(gfxrecon) GFXRECON_BEGIN_NAMESPACE(graphics) GFXRECON_BEGIN_NAMESPACE(dx12) @@ -86,6 +91,15 @@ typedef _com_ptr_t< ID3D12VersionedRootSignatureDeserializerComPtr; typedef _com_ptr_t<_com_IIID> ID3D12ObjectComPtr; +#if defined(GFXRECON_DXC_SUPPORT) +typedef _com_ptr_t<_com_IIID> IDxcUtilsComPtr; +typedef _com_ptr_t<_com_IIID> + IDxcContainerReflectionComPtr; +typedef _com_ptr_t<_com_IIID> IDxcBlobEncodingComPtr; +typedef _com_ptr_t<_com_IIID> + ID3D12LibraryReflectionComPtr; +#endif // GFXRECON_DXC_SUPPORT + struct CommandSet { ID3D12CommandAllocatorComPtr allocator{ nullptr }; From 066e0aeb147eeb347b9685819be9ce319b00ce81 Mon Sep 17 00:00:00 2001 From: Antonio Caggiano Date: Wed, 30 Oct 2024 14:23:30 +0100 Subject: [PATCH 49/70] Fix vkGetQueryPoolResults timeout loop (#1843) Retry when capture was SUCCESS and replay is NOT_READY. Continue in all other cases, including when capture was NOT_READY and replay is SUCCESS. --- framework/decode/vulkan_replay_consumer_base.cpp | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/framework/decode/vulkan_replay_consumer_base.cpp b/framework/decode/vulkan_replay_consumer_base.cpp index 0c326963bd..c86fe5f5c5 100644 --- a/framework/decode/vulkan_replay_consumer_base.cpp +++ b/framework/decode/vulkan_replay_consumer_base.cpp @@ -3365,8 +3365,7 @@ VkResult VulkanReplayConsumerBase::OverrideGetQueryPoolResults(PFN_vkGetQueryPoo do { result = func(device, query_pool, firstQuery, queryCount, dataSize, pData->GetOutputPointer(), stride, flags); - } while ((((original_result == VK_SUCCESS) && (result == VK_NOT_READY)) || - ((original_result == VK_NOT_READY) && (result == VK_SUCCESS))) && + } while (((original_result == VK_SUCCESS) && (result == VK_NOT_READY)) && (++retries <= kMaxQueryPoolResultsRetries)); return result; From 0ed818b0cfbdfea226cc7d57bf8cdd5d81b3b958 Mon Sep 17 00:00:00 2001 From: David Pinedo Date: Tue, 29 Oct 2024 13:32:13 -0600 Subject: [PATCH 50/70] dump resources: Corrections to dump resources help text --- USAGE_android.md | 5 +++-- USAGE_desktop_Vulkan.md | 5 +++-- tools/replay/replay_settings.h | 5 +++-- vulkan_dump_resources.md | 2 +- 4 files changed, 10 insertions(+), 7 deletions(-) diff --git a/USAGE_android.md b/USAGE_android.md index 4d5530ff36..5324c9f122 100644 --- a/USAGE_android.md +++ b/USAGE_android.md @@ -869,8 +869,9 @@ optional arguments: before calling Present. This is needed for accurate acquisition of instrumentation data on some platforms. --dump-resources - is BeginCommandBuffer=,Draw=,BeginRenderPass=, - NextSubpass=

,Dispatch=,TraceRays=,QueueSubmit= + is BeginCommandBuffer=,Draw=,BeginRenderPass=

, + NextSubpass=,EndRenderPass=,Dispatch=,TraceRays=, + QueueSubmit= GPU resources are dumped after the given vkCmdDraw*, vkCmdDispatch, or vkCmdTraceRaysKHR is replayed. Dump gpu resources after the given vmCmdDraw*, vkCmdDispatch, or diff --git a/USAGE_desktop_Vulkan.md b/USAGE_desktop_Vulkan.md index cfe70dabea..0999a988e6 100644 --- a/USAGE_desktop_Vulkan.md +++ b/USAGE_desktop_Vulkan.md @@ -725,8 +725,9 @@ Optional arguments: before calling Present. This is needed for accurate acquisition of instrumentation data on some platforms. --dump-resources - is BeginCommandBuffer=,Draw=,BeginRenderPass=, - NextSubpass=

,Dispatch=,TraceRays=,QueueSubmit= + is BeginCommandBuffer=,Draw=,BeginRenderPass=

, + NextSubpass=,EndRenderPass=,Dispatch=,TraceRays=, + QueueSubmit= GPU resources are dumped after the given vkCmdDraw*, vkCmdDispatch, or vkCmdTraceRaysKHR is replayed. Dump gpu resources after the given vmCmdDraw*, vkCmdDispatch, or vkCmdTraceRaysKHR is replayed. The parameter for diff --git a/tools/replay/replay_settings.h b/tools/replay/replay_settings.h index 96bf18bd3f..0bf1d2a1e9 100644 --- a/tools/replay/replay_settings.h +++ b/tools/replay/replay_settings.h @@ -286,8 +286,9 @@ static void PrintUsage(const char* exe_name) GFXRECON_WRITE_CONSOLE(" \t\tbefore calling Present. This is needed for accurate acquisition"); GFXRECON_WRITE_CONSOLE(" \t\tof instrumentation data on some platforms."); GFXRECON_WRITE_CONSOLE(" --dump-resources "); - GFXRECON_WRITE_CONSOLE(" \t\t is BeginCommandBuffer=,Draw=,BeginRenderPass=,"); - GFXRECON_WRITE_CONSOLE(" \t\tNextSubpass=

,Dispatch=,TraceRays=,QueueSubmit="); + GFXRECON_WRITE_CONSOLE(" \t\t is BeginCommandBuffer=,Draw=,BeginRenderPass=

,"); + GFXRECON_WRITE_CONSOLE(" \t\tNextSubpass=,EndRenderPass=,Dispatch=,TraceRays=,"); + GFXRECON_WRITE_CONSOLE(" \t\tQueueSubmit="); GFXRECON_WRITE_CONSOLE(" \t\tGPU resources are dumped after the given vkCmdDraw*,"); GFXRECON_WRITE_CONSOLE(" \t\tvkCmdDispatch, or vkCmdTraceRaysKHR is replayed."); GFXRECON_WRITE_CONSOLE(" --dump-resources "); diff --git a/vulkan_dump_resources.md b/vulkan_dump_resources.md index 64e305d402..6687368a65 100644 --- a/vulkan_dump_resources.md +++ b/vulkan_dump_resources.md @@ -229,7 +229,7 @@ In this example two command buffers are submitted for dumping, one with object I Dump resources feature can be control in several ways. To do so, a number of parameters can be provided to either to the `gfxrecon-replay` tool or to the Android application through the `gfxrecon.py` script: ```text ---dump-resources BeginCommandBuffer=,Draw=,BeginRenderPass=,NextSubPass=,EndRenderPass=,Dispatch=,TraceRays=,QueueSubmit= +--dump-resources BeginCommandBuffer=,Draw=,BeginRenderPass=

,NextSubPass=,EndRenderPass=,Dispatch=,TraceRays=,QueueSubmit= Dump gpu resources after the given vkCmdDraw*, vkCmdDispatch, or vkCmdTraceRaysKHR is replayed. The parameter for each is a block index from the capture file. The additional parameters are used to identify during which occurence of the vkCmdDraw/VkCmdDispath/VkCmdTrancRaysKHR resources will be dumped. NextSubPass can be repeated 0 or more times to From 74c565bbea106cfe9cb30c708f05617da2edec49 Mon Sep 17 00:00:00 2001 From: Dustin Graves Date: Tue, 22 Oct 2024 16:48:29 -0600 Subject: [PATCH 51/70] Clear CachedPSO data for CreatePipelineState replay When the replay --use-cached-psos option is not specified, clear the cached PSO data from the ID3D12Device2::CreatePipelineState parameters, similar to what is currently done for the ID3D12Device CreateComputePipelineState and CreateGraphicsPipelineState methods. --- .../decode/dx12_replay_consumer_base.cpp | 33 +++++++++++++++++++ framework/decode/dx12_replay_consumer_base.h | 6 ++++ .../dx12_generators/replay_overrides.json | 3 ++ .../generated_dx12_replay_consumer.cpp | 14 ++++---- 4 files changed, 50 insertions(+), 6 deletions(-) diff --git a/framework/decode/dx12_replay_consumer_base.cpp b/framework/decode/dx12_replay_consumer_base.cpp index e2ef484f5b..025bfbbd49 100644 --- a/framework/decode/dx12_replay_consumer_base.cpp +++ b/framework/decode/dx12_replay_consumer_base.cpp @@ -3580,6 +3580,39 @@ HRESULT Dx12ReplayConsumerBase::OverrideCreateComputePipelineState( return replay_result; } +HRESULT Dx12ReplayConsumerBase::OverrideCreatePipelineState( + DxObjectInfo* device_object_info, + HRESULT original_result, + StructPointerDecoder* pDesc, + Decoded_GUID riid, + HandlePointerDecoder* ppPipelineState) +{ + GFXRECON_UNREFERENCED_PARAMETER(original_result); + + GFXRECON_ASSERT(device_object_info != nullptr); + GFXRECON_ASSERT(device_object_info->object != nullptr); + + auto device = static_cast(device_object_info->object); + + auto pDesc2 = pDesc->GetPointer(); + if (!options_.use_cached_psos) + { + auto desc = pDesc->GetMetaStructPointer(); + GFXRECON_ASSERT(desc != nullptr); + + if (desc->cached_pso.decoded_value != nullptr) + { + desc->cached_pso.decoded_value->pCachedBlob = nullptr; + desc->cached_pso.decoded_value->CachedBlobSizeInBytes = 0; + } + } + + HRESULT replay_result = + device->CreatePipelineState(pDesc2, *riid.decoded_value, ppPipelineState->GetHandlePointer()); + + return replay_result; +} + HRESULT Dx12ReplayConsumerBase::OverrideSetFullscreenState(DxObjectInfo* swapchain_info, HRESULT original_result, diff --git a/framework/decode/dx12_replay_consumer_base.h b/framework/decode/dx12_replay_consumer_base.h index 17c25a17cf..6029cf702e 100644 --- a/framework/decode/dx12_replay_consumer_base.h +++ b/framework/decode/dx12_replay_consumer_base.h @@ -741,6 +741,12 @@ class Dx12ReplayConsumerBase : public Dx12Consumer Decoded_GUID riid, HandlePointerDecoder* pipelineState); + HRESULT OverrideCreatePipelineState(DxObjectInfo* device_object_info, + HRESULT original_result, + StructPointerDecoder* pDesc, + Decoded_GUID riid, + HandlePointerDecoder* ppPipelineState); + HRESULT OverrideSetFullscreenState(DxObjectInfo* swapchain_info, HRESULT original_result, BOOL Fullscreen, diff --git a/framework/generated/dx12_generators/replay_overrides.json b/framework/generated/dx12_generators/replay_overrides.json index 23c9b2c799..24659c4eab 100644 --- a/framework/generated/dx12_generators/replay_overrides.json +++ b/framework/generated/dx12_generators/replay_overrides.json @@ -39,6 
+39,9 @@ "ID3D12Device1": { "CreatePipelineLibrary": "OverrideCreatePipelineLibrary" }, + "ID3D12Device2": { + "CreatePipelineState": "OverrideCreatePipelineState" + }, "ID3D12Device3": { "EnqueueMakeResident": "OverrideEnqueueMakeResident", "OpenExistingHeapFromAddress": "OverrideOpenExistingHeapFromAddress" diff --git a/framework/generated/generated_dx12_replay_consumer.cpp b/framework/generated/generated_dx12_replay_consumer.cpp index 60d1f889d2..c4500a7671 100644 --- a/framework/generated/generated_dx12_replay_consumer.cpp +++ b/framework/generated/generated_dx12_replay_consumer.cpp @@ -10214,14 +10214,16 @@ void Dx12ReplayConsumer::Process_ID3D12Device2_CreatePipelineState( ppPipelineState); MapStructObjects(pDesc->GetMetaStructPointer(), GetObjectInfoTable(), GetGpuVaTable()); if(!ppPipelineState->IsNull()) ppPipelineState->SetHandleLength(1); - auto out_p_ppPipelineState = ppPipelineState->GetPointer(); - auto out_hp_ppPipelineState = ppPipelineState->GetHandlePointer(); - auto replay_result = reinterpret_cast(replay_object->object)->CreatePipelineState(pDesc->GetPointer(), - *riid.decoded_value, - out_hp_ppPipelineState); + DxObjectInfo object_info_ppPipelineState{}; + ppPipelineState->SetConsumerData(0, &object_info_ppPipelineState); + auto replay_result = OverrideCreatePipelineState(replay_object, + return_value, + pDesc, + riid, + ppPipelineState); if (SUCCEEDED(replay_result)) { - AddObject(out_p_ppPipelineState, out_hp_ppPipelineState, format::ApiCall_ID3D12Device2_CreatePipelineState); + AddObject(ppPipelineState->GetPointer(), ppPipelineState->GetHandlePointer(), std::move(object_info_ppPipelineState), format::ApiCall_ID3D12Device2_CreatePipelineState); } CheckReplayResult("ID3D12Device2_CreatePipelineState", return_value, replay_result); CustomReplayPostCall::Dispatch( From b4e1b9dc03cce34d499ebe3e6678713457b753ac Mon Sep 17 00:00:00 2001 From: John Zulauf Date: Mon, 28 Oct 2024 14:23:14 -0600 Subject: [PATCH 52/70] vulkan: Expose dispatch table getters For layered API support. --- framework/decode/vulkan_replay_consumer_base.h | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/framework/decode/vulkan_replay_consumer_base.h b/framework/decode/vulkan_replay_consumer_base.h index aa02e92e1f..f6dd552fc3 100644 --- a/framework/decode/vulkan_replay_consumer_base.h +++ b/framework/decode/vulkan_replay_consumer_base.h @@ -237,15 +237,15 @@ class VulkanReplayConsumerBase : public VulkanConsumer return MapHandle(capture_id, &CommonObjectInfoTable::GetVkDeviceInfo); } + const encode::VulkanInstanceTable* GetInstanceTable(const void* handle) const; + + const encode::VulkanDeviceTable* GetDeviceTable(const void* handle) const; + protected: const CommonObjectInfoTable& GetObjectInfoTable() const { return *object_info_table_; } CommonObjectInfoTable& GetObjectInfoTable() { return *object_info_table_; } - const encode::VulkanInstanceTable* GetInstanceTable(const void* handle) const; - - const encode::VulkanDeviceTable* GetDeviceTable(const void* handle) const; - void* PreProcessExternalObject(uint64_t object_id, format::ApiCallId call_id, const char* call_name); void PostProcessExternalObject( From 5c6558842be9cd1308dba262b2a532d1ef04b846 Mon Sep 17 00:00:00 2001 From: John Zulauf Date: Mon, 28 Oct 2024 14:30:42 -0600 Subject: [PATCH 53/70] Add interface to get/set capture mode For layered API support. 
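A minimal sketch of the intended use, assuming a layered runtime wants to adjust capture around its own internal work. The helper below is illustrative, not part of this patch, and relies only on the getter/setter added here:

#include <functional>

#include "encode/api_capture_manager.h"

// Save the current mode, override it for a scoped piece of work, then restore it.
void WithCaptureMode(gfxrecon::encode::ApiCaptureManager&                manager,
                     gfxrecon::encode::CommonCaptureManager::CaptureMode mode,
                     const std::function<void()>&                        work)
{
    const auto previous = manager.GetCaptureMode();
    manager.SetCaptureMode(mode);
    work();
    manager.SetCaptureMode(previous);
}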
--- framework/encode/api_capture_manager.h | 1 + framework/encode/capture_manager.h | 1 + 2 files changed, 2 insertions(+) diff --git a/framework/encode/api_capture_manager.h b/framework/encode/api_capture_manager.h index f5e453c10b..9f2e3d177c 100644 --- a/framework/encode/api_capture_manager.h +++ b/framework/encode/api_capture_manager.h @@ -186,6 +186,7 @@ class ApiCaptureManager bool IsTrimEnabled() const { return common_manager_->IsTrimEnabled(); } uint32_t GetCurrentFrame() const { return common_manager_->GetCurrentFrame(); } CommonCaptureManager::CaptureMode GetCaptureMode() const { return common_manager_->GetCaptureMode(); } + void SetCaptureMode(CommonCaptureManager::CaptureMode mode) { common_manager_->SetCaptureMode(mode); } bool GetDebugLayerSetting() const { return common_manager_->GetDebugLayerSetting(); } bool GetDebugDeviceLostSetting() const { return common_manager_->GetDebugDeviceLostSetting(); } bool GetDisableDxrSetting() const { return common_manager_->GetDisableDxrSetting(); } diff --git a/framework/encode/capture_manager.h b/framework/encode/capture_manager.h index b54f4e59e5..4423c1def2 100644 --- a/framework/encode/capture_manager.h +++ b/framework/encode/capture_manager.h @@ -260,6 +260,7 @@ class CommonCaptureManager bool IsTrimEnabled() const { return trim_enabled_; } uint32_t GetCurrentFrame() const { return current_frame_; } CaptureMode GetCaptureMode() const { return capture_mode_; } + void SetCaptureMode(CaptureMode new_mode) { capture_mode_ = new_mode; } bool GetDebugLayerSetting() const { return debug_layer_; } bool GetDebugDeviceLostSetting() const { return debug_device_lost_; } bool GetDisableDxrSetting() const { return disable_dxr_; } From 14025a092550d25f6d5f29c5cd09f6cc82ef28a4 Mon Sep 17 00:00:00 2001 From: John Zulauf Date: Mon, 28 Oct 2024 16:32:11 -0600 Subject: [PATCH 54/70] Add needed decoder and interface extensions Layered API support --- framework/decode/struct_pointer_decoder.h | 28 ++++++++++++++++++- .../decode/vulkan_replay_consumer_base.h | 5 ++++ 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/framework/decode/struct_pointer_decoder.h b/framework/decode/struct_pointer_decoder.h index 8d8d51d021..5c5f4f51fc 100644 --- a/framework/decode/struct_pointer_decoder.h +++ b/framework/decode/struct_pointer_decoder.h @@ -39,10 +39,35 @@ #include #include +#include +#include GFXRECON_BEGIN_NAMESPACE(gfxrecon) GFXRECON_BEGIN_NAMESPACE(decode) +template +struct DecoderHasOutputAllocator : std::false_type +{}; + +template +struct DecoderHasOutputAllocator().AllocateOutputData(size_t(1)))>> + : std::true_type +{}; + +template +std::enable_if_t::value, typename T::struct_type*> +StructPointerOutputDataAllocator(T*, size_t len) +{ + return DecodeAllocator::Allocate(len); +} + +template +std::enable_if_t::value, typename T::struct_type*> +StructPointerOutputDataAllocator(T* decoded_value, size_t len) +{ + return decoded_value->AllocateOutputData(len); +} + template class StructPointerDecoder : public PointerDecoderBase { @@ -68,12 +93,13 @@ class StructPointerDecoder : public PointerDecoderBase typename T::struct_type* AllocateOutputData(size_t len) { output_len_ = len; - output_data_ = DecodeAllocator::Allocate(len); + output_data_ = StructPointerOutputDataAllocator(decoded_structs_, len); return output_data_; } typename T::struct_type* AllocateOutputData(size_t len, const typename T::struct_type& init) { + assert(!DecoderHasOutputAllocator::value); output_len_ = len; output_data_ = DecodeAllocator::Allocate(len); diff --git 
a/framework/decode/vulkan_replay_consumer_base.h b/framework/decode/vulkan_replay_consumer_base.h index f6dd552fc3..55600bca8f 100644 --- a/framework/decode/vulkan_replay_consumer_base.h +++ b/framework/decode/vulkan_replay_consumer_base.h @@ -240,6 +240,11 @@ class VulkanReplayConsumerBase : public VulkanConsumer const encode::VulkanInstanceTable* GetInstanceTable(const void* handle) const; const encode::VulkanDeviceTable* GetDeviceTable(const void* handle) const; + void AddImageHandle(format::HandleId parent_id, format::HandleId id, VkImage handle, VulkanImageInfo&& initial_info) + { + AddHandle( + parent_id, &id, &handle, std::move(initial_info), &VulkanObjectInfoTable::AddVkImageInfo); + } protected: const CommonObjectInfoTable& GetObjectInfoTable() const { return *object_info_table_; } From 4accdafef395e40cb1a160e9ee47f4a02511e1ca Mon Sep 17 00:00:00 2001 From: John Zulauf Date: Wed, 30 Oct 2024 06:19:49 -0600 Subject: [PATCH 55/70] Add initialze to StructPointerDecoder allocation Prevents uninitialized next/pNext. The lower level decoders aren't always setting values. Set a good state at initialization (zeros) Layered API support. --- framework/decode/struct_pointer_decoder.h | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/framework/decode/struct_pointer_decoder.h b/framework/decode/struct_pointer_decoder.h index 5c5f4f51fc..b54d15e578 100644 --- a/framework/decode/struct_pointer_decoder.h +++ b/framework/decode/struct_pointer_decoder.h @@ -248,7 +248,10 @@ class StructPointerDecoder : public PointerDecoderBase typename T::struct_type* inner_struct_memory = DecodeAllocator::Allocate(inner_lens_[i]); - T* inner_decoded_structs = DecodeAllocator::Allocate(inner_lens_[i]); + // TODO: We initialize == true because the nexe field isn't always cleared on kIsNull in the lower + // level decoders. If this is a performance bottleneck, can clean up the lower decoders to + // initialize all fields. + T* inner_decoded_structs = DecodeAllocator::Allocate(inner_lens_[i], true); for (size_t j = 0; j < inner_lens_[i]; ++j) { From 4cd96792b6fae1484576cd1923bbcd70afb42a7a Mon Sep 17 00:00:00 2001 From: John Zulauf Date: Wed, 30 Oct 2024 06:31:18 -0600 Subject: [PATCH 56/70] Add vulkan guard to consumer and decode variables Prevent collisions with other API consumers and decoders Layered API support. --- tools/replay/android_main.cpp | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/tools/replay/android_main.cpp b/tools/replay/android_main.cpp index a20c740e8d..1abf506ea0 100644 --- a/tools/replay/android_main.cpp +++ b/tools/replay/android_main.cpp @@ -1,6 +1,6 @@ /* ** Copyright (c) 2018-2020 Valve Corporation -** Copyright (c) 2018-2020 LunarG, Inc. +** Copyright (c) 2018-2024 LunarG, Inc. 
** ** Permission is hereby granted, free of charge, to any person obtaining a ** copy of this software and associated documentation files (the "Software"), @@ -133,8 +133,8 @@ void android_main(struct android_app* app) return; } - gfxrecon::decode::VulkanReplayConsumer replay_consumer(application, replay_options); - gfxrecon::decode::VulkanDecoder decoder; + gfxrecon::decode::VulkanReplayConsumer vulkan_replay_consumer(application, replay_options); + gfxrecon::decode::VulkanDecoder vulkan_decoder; uint32_t start_frame, end_frame; bool has_mfr = GetMeasurementFrameRange(arg_parser, start_frame, end_frame); std::string measurement_file_name; @@ -153,12 +153,13 @@ void android_main(struct android_app* app) replay_options.preload_measurement_range, measurement_file_name); - replay_consumer.SetFatalErrorHandler([](const char* message) { throw std::runtime_error(message); }); - replay_consumer.SetFpsInfo(&fps_info); + vulkan_replay_consumer.SetFatalErrorHandler( + [](const char* message) { throw std::runtime_error(message); }); + vulkan_replay_consumer.SetFpsInfo(&fps_info); - decoder.AddConsumer(&replay_consumer); + vulkan_decoder.AddConsumer(&vulkan_replay_consumer); - file_processor->AddDecoder(&decoder); + file_processor->AddDecoder(&vulkan_decoder); application->SetPauseFrame(GetPauseFrame(arg_parser)); From f76e069b30e433ab23b5ade8645d9115133be89b Mon Sep 17 00:00:00 2001 From: John Zulauf Date: Wed, 30 Oct 2024 13:56:40 -0600 Subject: [PATCH 57/70] vulkan: Split OverrideCreateInstance/Device Split vulkan instance and device creation override into reusable pieces suitable for layered API support (specifically OpenXR) Make neeed information public for other replay consumers Layered API support. --- .../decode/vulkan_replay_consumer_base.cpp | 209 +++++++++++------- .../decode/vulkan_replay_consumer_base.h | 53 ++++- 2 files changed, 182 insertions(+), 80 deletions(-) diff --git a/framework/decode/vulkan_replay_consumer_base.cpp b/framework/decode/vulkan_replay_consumer_base.cpp index c86fe5f5c5..8ca73db6d3 100644 --- a/framework/decode/vulkan_replay_consumer_base.cpp +++ b/framework/decode/vulkan_replay_consumer_base.cpp @@ -2334,29 +2334,21 @@ bool VulkanReplayConsumerBase::CheckPNextChainForFrameBoundary(const VulkanDevic return true; } -VkResult -VulkanReplayConsumerBase::OverrideCreateInstance(VkResult original_result, - const StructPointerDecoder* pCreateInfo, - const StructPointerDecoder* pAllocator, - HandlePointerDecoder* pInstance) +void VulkanReplayConsumerBase::ModifyCreateInstanceInfo( + const StructPointerDecoder* pCreateInfo, CreateInstanceInfoState& create_state) { - GFXRECON_UNREFERENCED_PARAMETER(original_result); - - assert((pInstance != nullptr) && !pInstance->IsNull() && (pInstance->GetHandlePointer() != nullptr) && - (pCreateInfo != nullptr) && (pCreateInfo->GetPointer() != nullptr) && - (pInstance->GetHandlePointer() != nullptr)); const VkInstanceCreateInfo* replay_create_info = pCreateInfo->GetPointer(); - VkInstance* replay_instance = pInstance->GetHandlePointer(); if (loader_handle_ == nullptr) { InitializeLoader(); } - std::vector modified_layers; - std::vector modified_extensions; - VkInstanceCreateInfo modified_create_info = (*replay_create_info); + std::vector& modified_layers = create_state.modified_layers; + std::vector& modified_extensions = create_state.modified_extensions; + VkInstanceCreateInfo& modified_create_info = create_state.modified_create_info; + modified_create_info = *replay_create_info; // If VkDebugUtilsMessengerCreateInfoEXT or 
VkDebugReportCallbackCreateInfoEXT are in the pNext chain, update the // callback pointers. @@ -2534,63 +2526,77 @@ VulkanReplayConsumerBase::OverrideCreateInstance(VkResult original_result, modified_create_info.enabledLayerCount = static_cast(modified_layers.size()); modified_create_info.ppEnabledLayerNames = modified_layers.data(); } +} - VkResult result = create_instance_proc_(&modified_create_info, GetAllocationCallbacks(pAllocator), replay_instance); +void VulkanReplayConsumerBase::PostCreateInstanceUpdateState(const VkInstance replay_instance, + const VkInstanceCreateInfo& modified_create_info, + VulkanInstanceInfo& instance_info) +{ + AddInstanceTable(replay_instance); - if ((replay_instance != nullptr) && (result == VK_SUCCESS)) + if (modified_create_info.pApplicationInfo != nullptr) { - AddInstanceTable(*replay_instance); - - if (modified_create_info.pApplicationInfo != nullptr) - { - auto instance_info = reinterpret_cast(pInstance->GetConsumerData(0)); - assert(instance_info != nullptr); - - instance_info->api_version = modified_create_info.pApplicationInfo->apiVersion; - instance_info->enabled_extensions.assign(modified_create_info.ppEnabledExtensionNames, - modified_create_info.ppEnabledExtensionNames + - modified_create_info.enabledExtensionCount); - } + instance_info.api_version = modified_create_info.pApplicationInfo->apiVersion; + instance_info.enabled_extensions.assign(modified_create_info.ppEnabledExtensionNames, + modified_create_info.ppEnabledExtensionNames + + modified_create_info.enabledExtensionCount); } - - return result; } VkResult -VulkanReplayConsumerBase::OverrideCreateDevice(VkResult original_result, - VulkanPhysicalDeviceInfo* physical_device_info, - const StructPointerDecoder* pCreateInfo, - const StructPointerDecoder* pAllocator, - HandlePointerDecoder* pDevice) +VulkanReplayConsumerBase::OverrideCreateInstance(VkResult original_result, + const StructPointerDecoder* pCreateInfo, + const StructPointerDecoder* pAllocator, + HandlePointerDecoder* pInstance) { GFXRECON_UNREFERENCED_PARAMETER(original_result); - assert((physical_device_info != nullptr) && (pDevice != nullptr) && !pDevice->IsNull() && - (pDevice->GetHandlePointer() != nullptr) && (pCreateInfo != nullptr)); + assert((pInstance != nullptr) && !pInstance->IsNull() && (pInstance->GetHandlePointer() != nullptr) && + (pCreateInfo != nullptr) && (pCreateInfo->GetPointer() != nullptr) && + (pInstance->GetHandlePointer() != nullptr)); - SelectPhysicalDevice(physical_device_info); + // Update the create info to reflect the expectations/limitations of the replaying system (and of GFXR) + // Note: create_state is passed into the Modify call to allow the modified_create_info to reference + // addresses of create_state members which a return value doesn't appear to preserve + CreateInstanceInfoState create_state; + ModifyCreateInstanceInfo(pCreateInfo, create_state); - VkPhysicalDevice physical_device = physical_device_info->handle; - PFN_vkGetDeviceProcAddr get_device_proc_addr = GetDeviceAddrProc(physical_device); - PFN_vkCreateDevice create_device_proc = GetCreateDeviceProc(physical_device); + VkInstance* replay_instance = pInstance->GetHandlePointer(); + assert(replay_instance); + *replay_instance = VK_NULL_HANDLE; + + VkResult result = + create_instance_proc_(&create_state.modified_create_info, GetAllocationCallbacks(pAllocator), replay_instance); - if ((get_device_proc_addr == nullptr) || (create_device_proc == nullptr)) + if ((*replay_instance != VK_NULL_HANDLE) && (result == VK_SUCCESS)) { - return 
VK_ERROR_INITIALIZATION_FAILED; + auto instance_info = reinterpret_cast(pInstance->GetConsumerData(0)); + assert(instance_info); + PostCreateInstanceUpdateState(*replay_instance, create_state.modified_create_info, *instance_info); } - VkResult result = VK_ERROR_INITIALIZATION_FAILED; + return result; +} + +void VulkanReplayConsumerBase::ModifyCreateDeviceInfo( + VulkanPhysicalDeviceInfo* physical_device_info, + const StructPointerDecoder* pCreateInfo, + CreateDeviceInfoState& create_state) +{ + const VkPhysicalDevice physical_device = physical_device_info->handle; + auto instance_table = GetInstanceTable(physical_device); assert(instance_table != nullptr); auto replay_create_info = pCreateInfo->GetPointer(); - auto replay_device = pDevice->GetHandlePointer(); assert(replay_create_info != nullptr); - VkDeviceCreateInfo modified_create_info = (*replay_create_info); - std::vector modified_extensions; + VkDeviceCreateInfo& modified_create_info = create_state.modified_create_info; + std::vector& modified_extensions = create_state.modified_extensions; + std::vector& replay_device_group = create_state.replay_device_group; + VkDeviceGroupDeviceCreateInfo& modified_device_group_create_info = create_state.modified_device_group_create_info; - // Attempt to recreate capture device group with replay device group + modified_create_info = (*replay_create_info); // Attempt to recreate capture device group with replay device group const auto decoded_capture_create_info = pCreateInfo->GetMetaStructPointer(); std::vector capture_device_group; @@ -2605,8 +2611,6 @@ VulkanReplayConsumerBase::OverrideCreateDevice(VkResult origina std::copy(handle_ids, handle_ids + len, std::back_inserter(capture_device_group)); } - VkDeviceGroupDeviceCreateInfo modified_device_group_create_info = {}; - std::vector replay_device_group; const VkBaseInStructure* replay_previous_next = reinterpret_cast(&modified_create_info); const VkBaseInStructure* replay_next = reinterpret_cast(modified_create_info.pNext); @@ -2643,7 +2647,7 @@ VulkanReplayConsumerBase::OverrideCreateDevice(VkResult origina } // Enable extensions used for loading resources during initial state setup for trimmed files. 
- std::vector trim_extensions; + std::vector& trim_extensions = create_state.trim_extensions; if (loading_trim_state_ && CheckTrimDeviceExtensions(physical_device, &trim_extensions)) { for (const auto& extension : trim_extensions) @@ -2705,8 +2709,7 @@ VulkanReplayConsumerBase::OverrideCreateDevice(VkResult origina modified_create_info.ppEnabledExtensionNames = modified_extensions.data(); // Enable necessary features - graphics::VulkanDeviceUtil device_util; - graphics::VulkanDevicePropertyFeatureInfo property_feature_info = device_util.EnableRequiredPhysicalDeviceFeatures( + create_state.property_feature_info = create_state.device_util.EnableRequiredPhysicalDeviceFeatures( physical_device_info->parent_api_version, instance_table, physical_device, &modified_create_info); // Abort on/Remove unsupported features @@ -2716,23 +2719,24 @@ VulkanReplayConsumerBase::OverrideCreateDevice(VkResult origina modified_create_info.pNext, modified_create_info.pEnabledFeatures, options_.remove_unsupported_features); +} - // Forward device creation to next layer/driver - result = - create_device_proc(physical_device, &modified_create_info, GetAllocationCallbacks(pAllocator), replay_device); - - if ((replay_device == nullptr) || (result != VK_SUCCESS)) +VkResult VulkanReplayConsumerBase::PostCreateDeviceUpdateState(VulkanPhysicalDeviceInfo* physical_device_info, + const VkDevice replay_device, + CreateDeviceInfoState& create_state, + VulkanDeviceInfo* device_info) +{ + VkPhysicalDevice physical_device = physical_device_info->handle; + PFN_vkGetDeviceProcAddr get_device_proc_addr = GetDeviceAddrProc(physical_device); + if (get_device_proc_addr == nullptr) { - return result; + return VK_ERROR_INITIALIZATION_FAILED; } + AddDeviceTable(replay_device, get_device_proc_addr); - AddDeviceTable(*replay_device, get_device_proc_addr); - - auto device_info = reinterpret_cast(pDevice->GetConsumerData(0)); assert(device_info != nullptr); - - device_info->replay_device_group = std::move(replay_device_group); - device_info->extensions = std::move(trim_extensions); + device_info->replay_device_group = std::move(create_state.replay_device_group); + device_info->extensions = std::move(create_state.trim_extensions); device_info->parent = physical_device; // Create the memory allocator for the selected physical device. @@ -2751,32 +2755,32 @@ VulkanReplayConsumerBase::OverrideCreateDevice(VkResult origina auto allocator = options_.create_resource_allocator(); - std::vector enabled_extensions(modified_create_info.ppEnabledExtensionNames, - modified_create_info.ppEnabledExtensionNames + - modified_create_info.enabledExtensionCount); - InitializeResourceAllocator(physical_device_info, *replay_device, enabled_extensions, allocator); + std::vector enabled_extensions(create_state.modified_create_info.ppEnabledExtensionNames, + create_state.modified_create_info.ppEnabledExtensionNames + + create_state.modified_create_info.enabledExtensionCount); + InitializeResourceAllocator(physical_device_info, replay_device, enabled_extensions, allocator); device_info->allocator = std::unique_ptr(allocator); // Track state of physical device properties and features at device creation - device_info->property_feature_info = property_feature_info; + device_info->property_feature_info = create_state.property_feature_info; // Keep track of what queue families this device is planning on using. This information is // very important if we end up using the VulkanVirtualSwapchain path. 
auto max = [](uint32_t current_max, const VkDeviceQueueCreateInfo& dqci) { return std::max(current_max, dqci.queueFamilyIndex); }; - uint32_t max_queue_family = - std::accumulate(modified_create_info.pQueueCreateInfos, - modified_create_info.pQueueCreateInfos + modified_create_info.queueCreateInfoCount, - 0, - max); + uint32_t max_queue_family = std::accumulate(create_state.modified_create_info.pQueueCreateInfos, + create_state.modified_create_info.pQueueCreateInfos + + create_state.modified_create_info.queueCreateInfoCount, + 0, + max); device_info->queue_family_index_enabled.clear(); device_info->queue_family_index_enabled.resize(max_queue_family + 1, false); - for (uint32_t q = 0; q < modified_create_info.queueCreateInfoCount; ++q) + for (uint32_t q = 0; q < create_state.modified_create_info.queueCreateInfoCount; ++q) { - const VkDeviceQueueCreateInfo* queue_create_info = &modified_create_info.pQueueCreateInfos[q]; + const VkDeviceQueueCreateInfo* queue_create_info = &create_state.modified_create_info.pQueueCreateInfos[q]; assert(device_info->queue_family_creation_flags.find(queue_create_info->queueFamilyIndex) == device_info->queue_family_creation_flags.end()); device_info->queue_family_creation_flags[queue_create_info->queueFamilyIndex] = queue_create_info->flags; @@ -2784,8 +2788,57 @@ VulkanReplayConsumerBase::OverrideCreateDevice(VkResult origina } // Restore modified property/feature create info values to the original application values - device_util.RestoreModifiedPhysicalDeviceFeatures(); + create_state.device_util.RestoreModifiedPhysicalDeviceFeatures(); + + return VK_SUCCESS; +} + +VkResult +VulkanReplayConsumerBase::OverrideCreateDevice(VkResult original_result, + VulkanPhysicalDeviceInfo* physical_device_info, + const StructPointerDecoder* pCreateInfo, + const StructPointerDecoder* pAllocator, + HandlePointerDecoder* pDevice) +{ + GFXRECON_UNREFERENCED_PARAMETER(original_result); + + assert((physical_device_info != nullptr) && (pDevice != nullptr) && !pDevice->IsNull() && + (pDevice->GetHandlePointer() != nullptr) && (pCreateInfo != nullptr)); + + // NOTE: This must be first as it *sets* the physical_device_info->handle to point to the replay physical device + SelectPhysicalDevice(physical_device_info); + + VkPhysicalDevice physical_device = physical_device_info->handle; + + PFN_vkCreateDevice create_device_proc = GetCreateDeviceProc(physical_device); + + if (create_device_proc == nullptr) + { + return VK_ERROR_INITIALIZATION_FAILED; + } + + // Update the create info to reflect the expectations/limitations of the replaying system (and of GFXR) + // Note: create_state is passed into the Modify call to allow the modified_create_info to reference + // addresses of create_state members which a return value doesn't appear to preserve + CreateDeviceInfoState create_state; + ModifyCreateDeviceInfo(physical_device_info, pCreateInfo, create_state); + + VkResult result = VK_ERROR_INITIALIZATION_FAILED; + auto replay_device = pDevice->GetHandlePointer(); + assert(replay_device); + + // Forward device creation to next layer/driver + result = create_device_proc( + physical_device, &create_state.modified_create_info, GetAllocationCallbacks(pAllocator), replay_device); + + if ((replay_device == nullptr) || (result != VK_SUCCESS)) + { + return result; + } + VulkanDeviceInfo* device_info = reinterpret_cast(pDevice->GetConsumerData(0)); + assert(device_info); + result = PostCreateDeviceUpdateState(physical_device_info, *replay_device, create_state, device_info); return result; } diff --git 
a/framework/decode/vulkan_replay_consumer_base.h b/framework/decode/vulkan_replay_consumer_base.h index 55600bca8f..9f436ca024 100644 --- a/framework/decode/vulkan_replay_consumer_base.h +++ b/framework/decode/vulkan_replay_consumer_base.h @@ -246,6 +246,57 @@ class VulkanReplayConsumerBase : public VulkanConsumer parent_id, &id, &handle, std::move(initial_info), &VulkanObjectInfoTable::AddVkImageInfo); } + // Utilities for correctly setting up a vulkan create instance/device calls. Shared with OpenXR + // Store for the "modified for replay" instance create info, and all referenced memory + struct CreateInstanceInfoState + { + std::vector modified_layers; + std::vector modified_extensions; + VkInstanceCreateInfo modified_create_info; + }; + // create_state passed in by reference to conserve pointers to member variable + // Not initialized in a CreateDeviceInfoState constructor as *many* VulkanReplayConsumerBase + // member functions and variables are referenced + void ModifyCreateInstanceInfo(const StructPointerDecoder* pCreateInfo, + CreateInstanceInfoState& create_state); + + void PostCreateInstanceUpdateState(VkInstance replay_instance, + const VkInstanceCreateInfo& modified_create_info, + VulkanInstanceInfo& instance_info); + + // Store for the "modified for replay" device create info, and all referenced memory + struct CreateDeviceInfoState + { + VkDeviceCreateInfo modified_create_info; + std::vector modified_extensions; + std::vector trim_extensions; + VkDeviceGroupDeviceCreateInfo modified_device_group_create_info; + std::vector replay_device_group; + graphics::VulkanDeviceUtil device_util; + graphics::VulkanDevicePropertyFeatureInfo property_feature_info; + }; + + // create_state passed in by reference to conserve pointers to member variable + // Not initialized in a CreateDeviceInfoState constructor as *many* VulkanReplayConsumerBase + // member functions and variables are referenced + void ModifyCreateDeviceInfo(VulkanPhysicalDeviceInfo* physical_device_info, + const StructPointerDecoder* pCreateInfo, + CreateDeviceInfoState& create_state); + + VkResult PostCreateDeviceUpdateState(VulkanPhysicalDeviceInfo* physical_device_info, + VkDevice replay_device, + CreateDeviceInfoState& create_state, + VulkanDeviceInfo* device_info); + + void CheckResult(const char* func_name, VkResult original, VkResult replay, const decode::ApiCallInfo& call_info); + + PFN_vkGetInstanceProcAddr GetGetInstanceProcAddr() + { + if (loader_handle_ == nullptr) + InitializeLoader(); // Ensures GIPA is set + return get_instance_proc_addr_; + } + protected: const CommonObjectInfoTable& GetObjectInfoTable() const { return *object_info_table_; } @@ -259,8 +310,6 @@ class VulkanReplayConsumerBase : public VulkanConsumer const VkAllocationCallbacks* GetAllocationCallbacks(const StructPointerDecoder* original_callbacks); - void CheckResult(const char* func_name, VkResult original, VkResult replay, const decode::ApiCallInfo& call_info); - template typename T::HandleType MapHandle(format::HandleId id, const T* (CommonObjectInfoTable::*MapFunc)(format::HandleId) const) const From ceff61bf5133fe44a98040d26b5bffba0e002ba5 Mon Sep 17 00:00:00 2001 From: John Zulauf Date: Wed, 30 Oct 2024 15:08:03 -0600 Subject: [PATCH 58/70] Add VkPhysicalDevice aliasing support Vulkan captures unwrapped physical device handles, Layered API (like OpenXR) captures wrapped handles. During replay two HandleId's will reference the same VkPhysical device. 
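The lookup rule introduced below is intentionally single level: a handle's info is fetched, and if it names an alias the alias is fetched once more, never recursively. A minimal sketch of that rule with simplified, illustrative types (not the actual GFXR object table):

#include <cassert>
#include <cstdint>
#include <unordered_map>

using HandleId = uint64_t;
constexpr HandleId kNullId = 0;

struct Info
{
    int      payload{ 0 };
    HandleId alias{ kNullId }; // stands in for VulkanPhysicalDeviceInfo::vulkan_alias
};

Info* Find(HandleId id, std::unordered_map<HandleId, Info>* map)
{
    auto entry = map->find(id);
    return (entry != map->end()) ? &entry->second : nullptr;
}

// Follow at most one level of aliasing before returning the info.
Info* Lookup(HandleId id, std::unordered_map<HandleId, Info>* map)
{
    Info* info = Find(id, map);
    if ((info != nullptr) && (info->alias != kNullId))
    {
        info = Find(info->alias, map);
        // Aliasing is single level: the resolved info must not alias again.
        assert((info == nullptr) || (info->alias == kNullId));
    }
    return info;
}

int main()
{
    std::unordered_map<HandleId, Info> map;
    map[1] = Info{ 42, kNullId }; // handle id the Vulkan consumer created
    map[2] = Info{ 0, 1 };        // wrapped handle id captured by the layered API
    assert(Lookup(2, &map)->payload == 42);
    return 0;
}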
The vulkan_alias is the handleId as known by the vulkan_consumer, which will be created/updated, etc, by all Vulkan replay calls. --- framework/decode/vulkan_object_info.h | 10 ++ .../decode/vulkan_object_info_table_base.h | 99 +++++++++++++------ .../decode/vulkan_replay_consumer_base.cpp | 42 ++++++++ .../decode/vulkan_replay_consumer_base.h | 5 + 4 files changed, 126 insertions(+), 30 deletions(-) diff --git a/framework/decode/vulkan_object_info.h b/framework/decode/vulkan_object_info.h index 4c1cf236c3..605f3d5231 100644 --- a/framework/decode/vulkan_object_info.h +++ b/framework/decode/vulkan_object_info.h @@ -288,6 +288,16 @@ struct VulkanPhysicalDeviceInfo : public VulkanObjectInfo // Closest matching replay device. VulkanReplayDeviceInfo* replay_device_info{ nullptr }; + + // Because Vulkan captures unwrapped handles, and layered APIs (like OpenXR) + // capture wrapped handles, during replay two HandleId values will reference + // the same VkPhysical device. The vulkan_alias is the handleId as known by + // the vulkan_consumers, which will be created/updated, etc, by all Vulkan replay + // calls, s.t. the information known by the vulkan_consumer need not be duplicated. + // Operations on this, will use the vulkan_alias as the effective HandleId when set. + + // When Non-null, the GetVkObject will recur on the alias Id + format::HandleId vulkan_alias{ format::kNullHandleId }; }; struct VulkanDeviceInfo : public VulkanObjectInfo diff --git a/framework/decode/vulkan_object_info_table_base.h b/framework/decode/vulkan_object_info_table_base.h index 07cfb5ae57..0f40ddf9bd 100644 --- a/framework/decode/vulkan_object_info_table_base.h +++ b/framework/decode/vulkan_object_info_table_base.h @@ -32,6 +32,7 @@ #include #include +#include #include GFXRECON_BEGIN_NAMESPACE(gfxrecon) @@ -49,6 +50,72 @@ struct has_handle_future template inline constexpr bool has_handle_future_v = has_handle_future::value; +// NOTE: There's nothing VulkanSpecific in these utilities +// TODO: Find a better home for these + +// Utility functors to implement const and non-const versions of getters in a common impl +template +using ConstCorrectMappedTypePtr = decltype(&(std::declval().begin()->second)); + +struct ObjectInfoGetterBase +{ + template + MappedTypePtr GetObjectInfoImpl(format::HandleId id, Map* map) + { + assert(map != nullptr); + + MappedTypePtr object_info = nullptr; + + if (id != 0) + { + const auto entry = map->find(id); + + if (entry != map->end()) + { + object_info = &entry->second; + } + } + + return object_info; + } + template + MappedTypePtr GetAliasingObjectInfoImpl(format::HandleId id, Map* map) + { + MappedTypePtr object_info = GetObjectInfoImpl(id, map); + if (object_info && (object_info->vulkan_alias != format::kNullHandleId)) + { + object_info = GetObjectInfoImpl(object_info->vulkan_alias, map); + // Note: if id has an alias and the alias is valid, the alias must not alias. Aliasing is single level. + assert(!object_info || (object_info->vulkan_alias == format::kNullHandleId)); + } + return object_info; + } +}; + +// Because of " explicit specialization in non-namespace scope" these must be implemented outside the class below +template +struct ObjectInfoGetter : public ObjectInfoGetterBase +{ + template > + MappedTypePtr operator()(format::HandleId id, Map* map) + { + return GetObjectInfoImpl(id, map); + } +}; + +// Specialize to handle physical device aliasing. 
See comments for VulkanPhysicalDeviceInfo::vulkan_alias +// Note: could do SFINAE a "has member" check on vulkan_alias, but as only physical device needs aliasing support at +// this time, it's simpler just to specialize for VulkanPhysicalDeviceInfo +template <> +struct ObjectInfoGetter : public ObjectInfoGetterBase +{ + template > + MappedTypePtr operator()(format::HandleId id, Map* map) + { + return GetAliasingObjectInfoImpl(id, map); + } +}; + class VulkanObjectInfoTableBase { protected: @@ -132,41 +199,13 @@ class VulkanObjectInfoTableBase template const T* GetVkObjectInfo(format::HandleId id, const std::unordered_map* map) const { - assert(map != nullptr); - - const T* object_info = nullptr; - - if (id != 0) - { - const auto entry = map->find(id); - - if (entry != map->end()) - { - object_info = &entry->second; - } - } - - return object_info; + return ObjectInfoGetter()(id, map); } template T* GetVkObjectInfo(format::HandleId id, std::unordered_map* map) { - assert(map != nullptr); - - T* object_info = nullptr; - - if (id != 0) - { - auto entry = map->find(id); - - if (entry != map->end()) - { - object_info = &entry->second; - } - } - - return object_info; + return ObjectInfoGetter()(id, map); } }; diff --git a/framework/decode/vulkan_replay_consumer_base.cpp b/framework/decode/vulkan_replay_consumer_base.cpp index 8ca73db6d3..1e6d324a84 100644 --- a/framework/decode/vulkan_replay_consumer_base.cpp +++ b/framework/decode/vulkan_replay_consumer_base.cpp @@ -1270,6 +1270,41 @@ void VulkanReplayConsumerBase::CheckReplayDeviceInfo(VulkanPhysicalDeviceInfo* p } } +// Needed to support physical device aliasing. See comments for VulkanPhysicalDeviceInfo::vulkan_alias +void VulkanReplayConsumerBase::SetPhysicalDeviceAlias(format::HandleId instance, + VulkanPhysicalDeviceInfo& replay_physical_device) +{ + if ((replay_physical_device.capture_id == format::kNullHandleId) || + (replay_physical_device.handle == VK_NULL_HANDLE)) + { + return; + } + + const format::HandleId match_id = replay_physical_device.capture_id; + const VkPhysicalDevice match_handle = replay_physical_device.handle; + + auto instance_info = object_info_table_->GetVkInstanceInfo(instance); + assert(instance_info); + + for (const format::HandleId capture_device : instance_info->capture_devices) + { + const VulkanPhysicalDeviceInfo* physical_device_info = + GetObjectInfoTable().GetVkPhysicalDeviceInfo(capture_device); + if (!physical_device_info || (physical_device_info->vulkan_alias != format::kNullHandleId)) + { + continue; // only single depth aliasing + } + + const format::HandleId& capture_id = physical_device_info->capture_id; + const VkPhysicalDevice& handle = physical_device_info->handle; + if ((handle == match_handle) && (capture_id != match_id)) + { + replay_physical_device.vulkan_alias = capture_id; + break; + } + } +} + void VulkanReplayConsumerBase::SetPhysicalDeviceInstanceInfo(VulkanInstanceInfo* instance_info, VulkanPhysicalDeviceInfo* physical_device_info, VkPhysicalDevice replay_device) @@ -1572,6 +1607,13 @@ bool VulkanReplayConsumerBase::GetOverrideDeviceGroup(VulkanInstanceInfo* return true; } +void VulkanReplayConsumerBase::GetMatchingDevice(VulkanPhysicalDeviceInfo* physical_device_info) +{ + VulkanInstanceInfo* instance_info = object_info_table_->GetVkInstanceInfo(physical_device_info->parent_id); + assert(instance_info); + GetMatchingDevice(instance_info, physical_device_info); +} + void VulkanReplayConsumerBase::GetMatchingDevice(VulkanInstanceInfo* instance_info, VulkanPhysicalDeviceInfo* 
physical_device_info) { diff --git a/framework/decode/vulkan_replay_consumer_base.h b/framework/decode/vulkan_replay_consumer_base.h index 9f436ca024..14098b3c0d 100644 --- a/framework/decode/vulkan_replay_consumer_base.h +++ b/framework/decode/vulkan_replay_consumer_base.h @@ -297,6 +297,11 @@ class VulkanReplayConsumerBase : public VulkanConsumer return get_instance_proc_addr_; } + void SetPhysicalDeviceAlias(format::HandleId instance, VulkanPhysicalDeviceInfo& replay_physical_device); + + // Need the side effects from this when creating vulkan devices from OpenXr + void GetMatchingDevice(VulkanPhysicalDeviceInfo* physical_device_info); + protected: const CommonObjectInfoTable& GetObjectInfoTable() const { return *object_info_table_; } From 8603ae8d8c652d4bd6e4f553f7dbd08a760c819d Mon Sep 17 00:00:00 2001 From: John Zulauf Date: Wed, 30 Oct 2024 15:54:49 -0600 Subject: [PATCH 59/70] Reorganize common options Move replay options around that are shared between multiple replay paths. For layered API support. --- framework/decode/dx_replay_options.h | 8 +-- framework/decode/replay_options.h | 49 +++++++++++-------- .../decode/vulkan_replay_consumer_base.cpp | 16 +++--- framework/decode/vulkan_replay_options.h | 8 +-- tools/tool_settings.h | 2 +- 5 files changed, 39 insertions(+), 44 deletions(-) diff --git a/framework/decode/dx_replay_options.h b/framework/decode/dx_replay_options.h index 43bd47b58a..af40732ff6 100644 --- a/framework/decode/dx_replay_options.h +++ b/framework/decode/dx_replay_options.h @@ -56,13 +56,7 @@ struct DxReplayOptions : public ReplayOptions bool override_object_names{ false }; bool enable_dump_resources{ false }; DumpResourcesTarget dump_resources_target{}; - - util::ScreenshotFormat screenshot_format{ util::ScreenshotFormat::kBmp }; - std::vector screenshot_ranges; - std::string screenshot_dir; - std::string screenshot_file_prefix{ kDefaultScreenshotFilePrefix }; - std::string replace_dir; - int32_t memory_usage{ kDefaultBatchingMemoryUsage }; + int32_t memory_usage{ kDefaultBatchingMemoryUsage }; }; GFXRECON_END_NAMESPACE(decode) diff --git a/framework/decode/replay_options.h b/framework/decode/replay_options.h index 1316f7360e..cfdf816b9e 100644 --- a/framework/decode/replay_options.h +++ b/framework/decode/replay_options.h @@ -41,27 +41,34 @@ struct ScreenshotRange struct ReplayOptions { - bool enable_validation_layer{ false }; - bool sync_queue_submissions{ false }; - bool enable_debug_device_lost{ false }; - bool create_dummy_allocations{ false }; - bool omit_null_hardware_buffers{ false }; - bool quit_after_measurement_frame_range{ false }; - bool flush_measurement_frame_range{ false }; - bool flush_inside_measurement_range{ false }; - bool force_windowed{ false }; - uint32_t windowed_width{ 0 }; - uint32_t windowed_height{ 0 }; - bool force_windowed_origin{ false }; - int32_t window_topleft_x{ 0 }; - int32_t window_topleft_y{ 0 }; - int32_t override_gpu_index{ -1 }; - std::string capture_filename; - bool enable_print_block_info{ false }; - int64_t block_index_from{ -1 }; - int64_t block_index_to{ -1 }; - int32_t num_pipeline_creation_jobs{ 0 }; - std::string asset_file_path; + bool enable_validation_layer{ false }; + bool sync_queue_submissions{ false }; + bool enable_debug_device_lost{ false }; + bool create_dummy_allocations{ false }; + bool omit_null_hardware_buffers{ false }; + bool quit_after_measurement_frame_range{ false }; + bool flush_measurement_frame_range{ false }; + bool flush_inside_measurement_range{ false }; + bool force_windowed{ false }; + 
uint32_t windowed_width{ 0 }; + uint32_t windowed_height{ 0 }; + bool force_windowed_origin{ false }; + int32_t window_topleft_x{ 0 }; + int32_t window_topleft_y{ 0 }; + int32_t override_gpu_index{ -1 }; + std::string capture_filename; + bool enable_print_block_info{ false }; + int64_t block_index_from{ -1 }; + int64_t block_index_to{ -1 }; + bool skip_failed_allocations{ false }; + bool remove_unsupported_features{ false }; + util::ScreenshotFormat screenshot_format{ util::ScreenshotFormat::kBmp }; + std::vector screenshot_ranges; + std::string screenshot_dir; + std::string screenshot_file_prefix{ kDefaultScreenshotFilePrefix }; + uint32_t screenshot_width, screenshot_height; + int32_t num_pipeline_creation_jobs{ 0 }; + std::string asset_file_path; }; GFXRECON_END_NAMESPACE(decode) diff --git a/framework/decode/vulkan_replay_consumer_base.cpp b/framework/decode/vulkan_replay_consumer_base.cpp index 1e6d324a84..d01a785efd 100644 --- a/framework/decode/vulkan_replay_consumer_base.cpp +++ b/framework/decode/vulkan_replay_consumer_base.cpp @@ -5794,7 +5794,7 @@ VkResult VulkanReplayConsumerBase::OverrideCreateShaderModule( (pShaderModule != nullptr) && !pShaderModule->IsNull()); auto original_info = pCreateInfo->GetPointer(); - if (original_result < 0 || options_.replace_dir.empty()) + if (original_result < 0 || options_.replace_shader_dir.empty()) { VkResult vk_res = func( device_info->handle, original_info, GetAllocationCallbacks(pAllocator), pShaderModule->GetHandlePointer()); @@ -5833,7 +5833,7 @@ VkResult VulkanReplayConsumerBase::OverrideCreateShaderModule( const size_t orig_size = original_info->codeSize; uint64_t handle_id = *pShaderModule->GetPointer(); std::string file_name = "sh" + std::to_string(handle_id); - std::string file_path = util::filepath::Join(options_.replace_dir, file_name); + std::string file_path = util::filepath::Join(options_.replace_shader_dir, file_name); FILE* fp = nullptr; int32_t result = util::platform::FileOpen(&fp, file_path.c_str(), "rb"); @@ -9746,7 +9746,7 @@ void VulkanReplayConsumerBase::OverrideUpdateDescriptorSets( uint64_t handle_id = pipelines[i]; std::string file_name = "sh" + std::to_string(handle_id) + "_" + std::to_string(stage_create_info.stage); - std::string file_path = util::filepath::Join(options_.replace_dir, file_name); + std::string file_path = util::filepath::Join(options_.replace_shader_dir, file_name); FILE* fp = nullptr; int32_t result = util::platform::FileOpen(&fp, file_path.c_str(), "rb"); @@ -9794,7 +9794,7 @@ VkResult VulkanReplayConsumerBase::OverrideCreateGraphicsPipelines( std::vector> replaced_file_code; auto* maybe_replaced_create_infos = in_p_create_infos; - if (original_result >= 0 && !options_.replace_dir.empty()) + if (original_result >= 0 && !options_.replace_shader_dir.empty()) { uint32_t num_bytes = graphics::vulkan_struct_deep_copy(in_p_create_infos, create_info_count, nullptr); create_info_data.resize(num_bytes); @@ -9885,7 +9885,7 @@ VkResult VulkanReplayConsumerBase::OverrideCreateComputePipelines( size_t orig_size = create_info->codeSize; uint64_t handle_id = shaders[i]; std::string file_name = "sh" + std::to_string(handle_id); - std::string file_path = util::filepath::Join(options_.replace_dir, file_name); + std::string file_path = util::filepath::Join(options_.replace_shader_dir, file_name); FILE* fp = nullptr; int32_t result = util::platform::FileOpen(&fp, file_path.c_str(), "rb"); @@ -9927,7 +9927,7 @@ VkResult VulkanReplayConsumerBase::OverrideCreateShadersEXT( std::vector> replaced_file_code; auto* 
maybe_replaced_create_infos = in_p_create_infos; - if (original_result >= 0 && !options_.replace_dir.empty()) + if (original_result >= 0 && !options_.replace_shader_dir.empty()) { uint32_t num_bytes = graphics::vulkan_struct_deep_copy(in_p_create_infos, create_info_count, nullptr); create_info_data.resize(num_bytes); @@ -10098,7 +10098,7 @@ std::function()> VulkanReplayConsumer auto create_infos = reinterpret_cast(create_info_data.data()); std::vector> replaced_file_code; - if (returnValue >= 0 && !options_.replace_dir.empty()) + if (returnValue >= 0 && !options_.replace_shader_dir.empty()) { replaced_file_code = ReplaceShaders(createInfoCount, create_infos, pipelines.data()); } @@ -10233,7 +10233,7 @@ VulkanReplayConsumerBase::AsyncCreateShadersEXT(const ApiCallInfo& auto create_infos = reinterpret_cast(create_info_data.data()); std::vector> replaced_file_code; - if (returnValue >= 0 && !options_.replace_dir.empty()) + if (returnValue >= 0 && !options_.replace_shader_dir.empty()) { replaced_file_code = ReplaceShaders(createInfoCount, create_infos, shaders.data()); } diff --git a/framework/decode/vulkan_replay_options.h b/framework/decode/vulkan_replay_options.h index 7771a66d58..65e6c75f5c 100644 --- a/framework/decode/vulkan_replay_options.h +++ b/framework/decode/vulkan_replay_options.h @@ -57,9 +57,7 @@ static constexpr int kUnspecifiedColorAttachment = -1; struct VulkanReplayOptions : public ReplayOptions { bool enable_vulkan{ true }; - bool skip_failed_allocations{ false }; bool omit_pipeline_cache_data{ false }; - bool remove_unsupported_features{ false }; bool use_colorspace_fallback{ false }; bool offscreen_swapchain_frame_boundary{ false }; util::SwapchainOption swapchain_option{ util::SwapchainOption::kVirtual }; @@ -67,13 +65,9 @@ struct VulkanReplayOptions : public ReplayOptions int32_t override_gpu_group_index{ -1 }; int32_t surface_index{ -1 }; CreateResourceAllocator create_resource_allocator; - util::ScreenshotFormat screenshot_format{ util::ScreenshotFormat::kBmp }; - std::vector screenshot_ranges; - std::string screenshot_dir; - std::string screenshot_file_prefix{ kDefaultScreenshotFilePrefix }; uint32_t screenshot_width, screenshot_height; float screenshot_scale; - std::string replace_dir; + std::string replace_shader_dir; SkipGetFenceStatus skip_get_fence_status{ SkipGetFenceStatus::NoSkip }; std::vector skip_get_fence_ranges; bool wait_before_present{ false }; diff --git a/tools/tool_settings.h b/tools/tool_settings.h index f459f8362a..239d6ec9a0 100644 --- a/tools/tool_settings.h +++ b/tools/tool_settings.h @@ -1019,7 +1019,7 @@ GetVulkanReplayOptions(const gfxrecon::util::ArgumentParser& arg_parse replay_options.virtual_swapchain_skip_blit = true; } - replay_options.replace_dir = arg_parser.GetArgumentValue(kShaderReplaceArgument); + replay_options.replace_shader_dir = arg_parser.GetArgumentValue(kShaderReplaceArgument); replay_options.create_resource_allocator = GetCreateResourceAllocatorFunc(arg_parser, filename, replay_options, tracked_object_info_table); From be443158a2c7bb77c2379cd265269d8162fd0af5 Mon Sep 17 00:00:00 2001 From: Mike Schuchardt Date: Tue, 17 Sep 2024 16:14:06 -0700 Subject: [PATCH 60/70] Use libsigchain on Android Install page guard signal handler using AddSpecialSignalHandlerFn from the libsigchain in the Android runtime. This prevents traced applications from replacing our signal handler when they try to install thier own signal handlers with sigaction. 
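For reference, the registration pattern looks roughly like the sketch below. It is a minimal illustration rather than part of the patch: SigchainAction and the function-pointer type mirror the declarations this commit adds to page_guard_manager.h, while AddSpecialSignalHandlerFn itself is an Android runtime symbol that is resolved at run time.

```cpp
// Minimal sketch of registering a libsigchain "special" handler; assumes an
// Android process whose runtime exports AddSpecialSignalHandlerFn.
#include <csignal>
#include <cstdint>
#include <dlfcn.h>

struct SigchainAction
{
    bool (*sc_sigaction)(int, siginfo_t*, void*); // return true when the signal was handled
    sigset_t sc_mask;
    uint64_t sc_flags;
};

using PFN_AddSpecialSignalHandlerFn = void (*)(int signal, SigchainAction* sa);

static bool HandleSegv(int, siginfo_t*, void*)
{
    // Returning false tells libsigchain the fault was not ours, so handlers the
    // application installed with sigaction() still get a chance to run.
    return false;
}

int main()
{
    auto add_fn = reinterpret_cast<PFN_AddSpecialSignalHandlerFn>(
        dlsym(RTLD_DEFAULT, "AddSpecialSignalHandlerFn"));
    if (add_fn != nullptr)
    {
        SigchainAction sc = {};
        sc.sc_sigaction = HandleSegv;
        sigemptyset(&sc.sc_mask);
        add_fn(SIGSEGV, &sc); // special handlers run ahead of any sigaction() handler
    }
    return 0;
}
```

Because special handlers stay ahead of anything registered later through sigaction, the traced application can keep installing its own handlers without displacing ours.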
This is implicitly enabled when using page_guard on Android and falls back to sigaction if libsigchain isn't found or the add call fails. If the signal handler watcher is enabled, the watcher thread will exit as soon as our handler is successfully added with libsigchain. --- framework/util/page_guard_manager.cpp | 147 +++++++++++++++++++------- framework/util/page_guard_manager.h | 18 ++++ 2 files changed, 127 insertions(+), 38 deletions(-) diff --git a/framework/util/page_guard_manager.cpp b/framework/util/page_guard_manager.cpp index f21bc5ae12..a60a1fc74c 100644 --- a/framework/util/page_guard_manager.cpp +++ b/framework/util/page_guard_manager.cpp @@ -29,6 +29,7 @@ #include "util/platform.h" #include +#include #include GFXRECON_BEGIN_NAMESPACE(gfxrecon) @@ -119,7 +120,7 @@ static stack_t s_old_stack = {}; static uint8_t* s_alt_stack = nullptr; static size_t s_alt_stack_size = 0; -static void PageGuardExceptionHandler(int id, siginfo_t* info, void* data) +static bool TryHandlePageGuardException(int id, siginfo_t* info, void* data) { bool handled = false; PageGuardManager* manager = PageGuardManager::Get(); @@ -182,8 +183,12 @@ static void PageGuardExceptionHandler(int id, siginfo_t* info, void* data) #endif handled = manager->HandleGuardPageViolation(info->si_addr, is_write, true); } + return handled; +} - if (!handled) +static void PageGuardExceptionHandler(int id, siginfo_t* info, void* data) +{ + if (!TryHandlePageGuardException(id, info, data)) { // This was not a SIGSEGV signal for an address that was protected with mprotect(). // Raise the original signal handler for this case. @@ -214,6 +219,18 @@ void PageGuardManager::InitializeSystemExceptionContext(void) s_alt_stack = new uint8_t[s_alt_stack_size]; } #endif +#if defined(__ANDROID__) + AddSpecialSignalHandlerFn = + reinterpret_cast(dlsym(RTLD_DEFAULT, "AddSpecialSignalHandlerFn")); + RemoveSpecialSignalHandlerFn = + reinterpret_cast(dlsym(RTLD_DEFAULT, "RemoveSpecialSignalHandlerFn")); + if (!AddSpecialSignalHandlerFn || !RemoveSpecialSignalHandlerFn) + { + AddSpecialSignalHandlerFn = nullptr; + RemoveSpecialSignalHandlerFn = nullptr; + GFXRECON_LOG_WARNING("PageGuardManager could not find libsigchain symbols. Falling back to sigaction.") + } +#endif } PageGuardManager::PageGuardManager() : @@ -342,6 +359,9 @@ bool PageGuardManager::CheckSignalHandler() void* PageGuardManager::SignalHandlerWatcher(void* args) { while (instance_->enable_signal_handler_watcher_ && +#if defined(__ANDROID__) + !instance_->libsigchain_active_ && +#endif (instance_->signal_handler_watcher_max_restores_ < 0 || signal_handler_watcher_restores_ < static_cast(instance_->signal_handler_watcher_max_restores_))) { @@ -498,43 +518,83 @@ void PageGuardManager::AddExceptionHandler() exception_handler_count_ = 1; } #else - // Retrieve the current SIGSEGV handler info before replacing the current signal handler to determine if our - // replacement signal handler should use an alternate signal stack. - int result = sigaction(MEMPROT_SIGNAL, nullptr, &s_old_sigaction); - - if (result != -1) +#if defined(__ANDROID__) + if (!libsigchain_active_ && AddSpecialSignalHandlerFn) { - struct sigaction sa = {}; - - sa.sa_flags = SA_SIGINFO; - sigemptyset(&sa.sa_mask); - sa.sa_sigaction = PageGuardExceptionHandler; + static std::jmp_buf abort_env; - if ((s_old_sigaction.sa_flags & SA_ONSTACK) == SA_ONSTACK) - { - // Replace the current alternate signal stack with one that is guatanteed to be valid for the page guard - // signal handler. 
- stack_t new_stack; - new_stack.ss_sp = s_alt_stack; - new_stack.ss_flags = 0; - new_stack.ss_size = s_alt_stack_size; + struct sigaction orig_abort_sa = {}; - sigaltstack(&new_stack, &s_old_stack); + struct sigaction abort_sa = {}; + abort_sa.sa_flags = SA_SIGINFO; + sigemptyset(&abort_sa.sa_mask); + abort_sa.sa_sigaction = [](int id, siginfo_t* info, void* data) { std::longjmp(abort_env, 0); }; - sa.sa_flags |= SA_ONSTACK; + // Install temporary handler for SIGABRT to catch failure in AddSpecialSignalHandlerFn + sigaction(SIGABRT, &abort_sa, &orig_abort_sa); + if (setjmp(abort_env) == 0) + { + // Install special signal handler with libsigchain + SigchainAction sc = {}; + sc.sc_sigaction = TryHandlePageGuardException; + sigemptyset(&sc.sc_mask); + AddSpecialSignalHandlerFn(MEMPROT_SIGNAL, &sc); + + // If we got this far then it was successful + libsigchain_active_ = true; + exception_handler_ = reinterpret_cast(TryHandlePageGuardException); + exception_handler_count_ = 1; + } + else + { + GFXRECON_LOG_WARNING( + "PageGuardManager libsigchain AddSpecialSignalHandlerFn failed, falling back to sigaction"); } - result = sigaction(MEMPROT_SIGNAL, &sa, nullptr); - } - - if (result != -1) - { - exception_handler_ = reinterpret_cast(PageGuardExceptionHandler); - exception_handler_count_ = 1; + // Restore original SIGABRT handler + sigaction(SIGABRT, &orig_abort_sa, nullptr); } - else + if (!libsigchain_active_) +#endif { - GFXRECON_LOG_ERROR("PageGuardManager failed to register exception handler (errno = %d)", errno); + // Retrieve the current SIGSEGV handler info before replacing the current signal handler to determine if our + // replacement signal handler should use an alternate signal stack. + int result = sigaction(MEMPROT_SIGNAL, nullptr, &s_old_sigaction); + + if (result != -1) + { + struct sigaction sa = {}; + + sa.sa_flags = SA_SIGINFO; + sigemptyset(&sa.sa_mask); + sa.sa_sigaction = PageGuardExceptionHandler; + + if ((s_old_sigaction.sa_flags & SA_ONSTACK) == SA_ONSTACK) + { + // Replace the current alternate signal stack with one that is guaranteed to be valid for the page + // guard signal handler. + stack_t new_stack; + new_stack.ss_sp = s_alt_stack; + new_stack.ss_flags = 0; + new_stack.ss_size = s_alt_stack_size; + + sigaltstack(&new_stack, &s_old_stack); + + sa.sa_flags |= SA_ONSTACK; + } + + result = sigaction(MEMPROT_SIGNAL, &sa, nullptr); + } + + if (result != -1) + { + exception_handler_ = reinterpret_cast(PageGuardExceptionHandler); + exception_handler_count_ = 1; + } + else + { + GFXRECON_LOG_ERROR("PageGuardManager failed to register exception handler (errno = %d)", errno); + } } #endif } @@ -578,16 +638,27 @@ void PageGuardManager::ClearExceptionHandler(void* exception_handler) GetLastError()); } #else - if ((s_old_sigaction.sa_flags & SA_ONSTACK) == SA_ONSTACK) +#if defined(__ANDROID__) + if (libsigchain_active_) { - // Restore the alternate signal stack. - sigaltstack(&s_old_stack, nullptr); + RemoveSpecialSignalHandlerFn(MEMPROT_SIGNAL, + reinterpret_cast(exception_handler)); + libsigchain_active_ = false; } - - // Restore the old signal handler. - if (sigaction(MEMPROT_SIGNAL, &s_old_sigaction, nullptr) == -1) + else +#endif { - GFXRECON_LOG_ERROR("PageGuardManager failed to remove exception handler (errno= %d)", errno); + if ((s_old_sigaction.sa_flags & SA_ONSTACK) == SA_ONSTACK) + { + // Restore the alternate signal stack. + sigaltstack(&s_old_stack, nullptr); + } + + // Restore the old signal handler. 
+ if (sigaction(MEMPROT_SIGNAL, &s_old_sigaction, nullptr) == -1) + { + GFXRECON_LOG_ERROR("PageGuardManager failed to remove exception handler (errno= %d)", errno); + } } #endif } diff --git a/framework/util/page_guard_manager.h b/framework/util/page_guard_manager.h index a1642ecefb..489bcccaaf 100644 --- a/framework/util/page_guard_manager.h +++ b/framework/util/page_guard_manager.h @@ -55,6 +55,18 @@ #endif #endif +#if defined(__ANDROID__) +struct SigchainAction +{ + bool (*sc_sigaction)(int, siginfo_t*, void*); + sigset_t sc_mask; + uint64_t sc_flags; +}; + +typedef void (*PFN_AddSpecialSignalHandlerFn)(int signal, SigchainAction* sa); +typedef void (*PFN_RemoveSpecialSignalHandlerFn)(int signal, bool (*fn)(int, siginfo_t*, void*)); +#endif + GFXRECON_BEGIN_NAMESPACE(gfxrecon) GFXRECON_BEGIN_NAMESPACE(util) @@ -300,6 +312,12 @@ class PageGuardManager std::unordered_set uffd_fault_causing_threads; #endif +#if defined(__ANDROID__) + PFN_AddSpecialSignalHandlerFn AddSpecialSignalHandlerFn = nullptr; + PFN_RemoveSpecialSignalHandlerFn RemoveSpecialSignalHandlerFn = nullptr; + bool libsigchain_active_ = false; +#endif + bool InitializeUserFaultFd(); void UffdTerminate(); uint32_t UffdBlockFaultingThreads(); From 3a5422bd17394d0637f8c47ee768c2a91d0d3abe Mon Sep 17 00:00:00 2001 From: ziga-lunarg <87310389+ziga-lunarg@users.noreply.github.com> Date: Fri, 1 Nov 2024 21:02:08 +0100 Subject: [PATCH 61/70] Fix descriptor info assert during capture (#1858) * Fix descriptor info assert during capture * Combine asserts --- framework/encode/vulkan_state_tracker.cpp | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/framework/encode/vulkan_state_tracker.cpp b/framework/encode/vulkan_state_tracker.cpp index 9df3c51ec3..40a849e03d 100644 --- a/framework/encode/vulkan_state_tracker.cpp +++ b/framework/encode/vulkan_state_tracker.cpp @@ -1335,8 +1335,6 @@ void VulkanStateTracker::TrackUpdateDescriptorSetWithTemplate(VkDescriptorSet { auto& binding = wrapper->bindings[current_binding]; - assert(binding.images != nullptr); - // Check count for consecutive updates. uint32_t current_writes = std::min(current_count, (binding.count - current_array_element)); @@ -1352,6 +1350,9 @@ void VulkanStateTracker::TrackUpdateDescriptorSetWithTemplate(VkDescriptorSet binding.type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE || binding.type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT; + assert((immutable_image && binding.images != nullptr) || + (!immutable_image && binding.storage_images != nullptr)); + format::HandleId* dst_sampler_ids = &binding.sampler_ids[current_array_element]; format::HandleId* dst_image_ids = &binding.handle_ids[current_array_element]; VkDescriptorImageInfo* dst_info = immutable_image ? &binding.images[current_array_element] From 0a55afc7268888a5d16990e97f9e3ff4887be638 Mon Sep 17 00:00:00 2001 From: Panagiotis Apostolou <104391532+panos-lunarg@users.noreply.github.com> Date: Sat, 2 Nov 2024 07:47:09 +0100 Subject: [PATCH 62/70] Fix table in USAGE_desktop_vulkan.md (#1857) --- USAGE_desktop_Vulkan.md | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/USAGE_desktop_Vulkan.md b/USAGE_desktop_Vulkan.md index 0999a988e6..60183a813f 100644 --- a/USAGE_desktop_Vulkan.md +++ b/USAGE_desktop_Vulkan.md @@ -261,9 +261,8 @@ option values. | Capture Specific Frames | GFXRECON_CAPTURE_FRAMES | STRING | Specify one or more comma-separated frame ranges to capture. Each range will be written to its own file. 
A frame range can be specified as a single value, to specify a single frame to capture, or as two hyphenated values, to specify the first and last frame to capture. Frame ranges should be specified in ascending order and cannot overlap. Note that frame numbering is 1-based (i.e. the first frame is frame 1). Example: `200,301-305` will create two capture files, one containing a single frame and one containing five frames. Default is: Empty string (all frames are captured). | | Quit after capturing frame ranges | GFXRECON_QUIT_AFTER_CAPTURE_FRAMES | BOOL | Setting it to `true` will force the application to terminate once all frame ranges specified by `GFXRECON_CAPTURE_FRAMES` have been captured. Default is: `false` | | Hotkey Capture Trigger | GFXRECON_CAPTURE_TRIGGER | STRING | Specify a hotkey (any one of F1-F12, TAB, CONTROL) that will be used to start/stop capture. Example: `F3` will set the capture trigger to F3 hotkey. One capture file will be generated for each pair of start/stop hotkey presses. Default is: Empty string (hotkey capture trigger is disabled). | -| Hotkey Capture Trigger Frames | GFXRECON_CAPTURE_TRIGGER_FRAMES | STRING | Specify a limit on the number of frames to be captured via hotkey. Example: `1` will capture exactly one frame when the trigger key is pressed. Default is: Empty string (no limit) - -| Use asset file | GFXRECON_CAPTURE_USE_ASSET_FILE | BOOL | When set to `true` assets (images, buffers and descriptors) will be stored separately into an asset file instead of the capture file. | +| Hotkey Capture Trigger Frames | GFXRECON_CAPTURE_TRIGGER_FRAMES | STRING | Specify a limit on the number of frames to be captured via hotkey. Example: `1` will capture exactly one frame when the trigger key is pressed. Default is: Empty string (no limit) | +| Use asset file | GFXRECON_CAPTURE_USE_ASSET_FILE | BOOL | When set to `true` assets (images, buffers and descriptors) will be stored separately into an asset file instead of the capture file. | | Capture Specific GPU Queue Submits | GFXRECON_CAPTURE_QUEUE_SUBMITS | STRING | Specify one or more comma-separated GPU queue submit call ranges to capture. Queue submit calls are `vkQueueSubmit` for Vulkan and `ID3D12CommandQueue::ExecuteCommandLists` for DX12. Queue submit ranges work as described above in `GFXRECON_CAPTURE_FRAMES` but on GPU queue submit calls instead of frames. The index is 0-based. Default is: Empty string (all queue submits are captured). | | Capture File Compression Type | GFXRECON_CAPTURE_COMPRESSION_TYPE | STRING | Compression format to use with the capture file. Valid values are: `LZ4`, `ZLIB`, `ZSTD`, and `NONE`. Default is: `LZ4` | | Capture File Timestamp | GFXRECON_CAPTURE_FILE_TIMESTAMP | BOOL | Add a timestamp to the capture file as described by [Timestamps](#timestamps). 
Default is: `true` | From e5fe931c76c1307021bf3f35400ced6156f44c8e Mon Sep 17 00:00:00 2001 From: ziga-lunarg <87310389+ziga-lunarg@users.noreply.github.com> Date: Tue, 5 Nov 2024 11:27:41 +0100 Subject: [PATCH 63/70] Add missing swapchain image destroy (#1861) --- framework/encode/vulkan_state_tracker.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/framework/encode/vulkan_state_tracker.cpp b/framework/encode/vulkan_state_tracker.cpp index 40a849e03d..40f3eb4f16 100644 --- a/framework/encode/vulkan_state_tracker.cpp +++ b/framework/encode/vulkan_state_tracker.cpp @@ -1935,6 +1935,7 @@ void VulkanStateTracker::DestroyState(vulkan_wrappers::SwapchainKHRWrapper* wrap std::unique_lock lock(state_table_mutex_); for (auto entry : wrapper->child_images) { + DestroyState(entry); state_table_.RemoveWrapper(entry); } } From f038193e021337e68e159f87e39e7ad25c746ea0 Mon Sep 17 00:00:00 2001 From: Fabian Schmidt <165773884+fabian-lunarg@users.noreply.github.com> Date: Tue, 5 Nov 2024 17:15:11 +0100 Subject: [PATCH 64/70] ShaderGroupHandles: tracking/trimming (#1844) - various changes to state-writer: always provide shader-group-handles - track bound pipelines in replay, retrieve map of old/new handles in CmdTraceRays - Add a generic murmur32-implementation to util/hash.h - Add a 'linear hashmap' type (cpu-flavor), populate with handles - Add unit-test for linear_hashmap --- android/framework/util/CMakeLists.txt | 1 + framework/decode/vulkan_object_info.h | 1 + .../decode/vulkan_replay_consumer_base.cpp | 98 ++++-- .../decode/vulkan_replay_consumer_base.h | 5 + framework/encode/vulkan_capture_manager.cpp | 10 +- framework/encode/vulkan_handle_wrappers.h | 1 + framework/encode/vulkan_state_writer.cpp | 32 ++ framework/encode/vulkan_state_writer.h | 2 + .../generated_vulkan_replay_consumer.cpp | 8 +- .../vulkan_generators/replay_overrides.json | 3 +- .../vulkan_replay_consumer_body_generator.py | 2 + framework/graphics/vulkan_device_util.cpp | 13 +- framework/graphics/vulkan_device_util.h | 1 + .../graphics/vulkan_shader_group_handle.h | 21 +- framework/util/CMakeLists.txt | 2 + framework/util/hash.h | 56 ++++ framework/util/linear_hashmap.h | 285 ++++++++++++++++++ framework/util/test/test_linear_hashmap.cpp | 139 +++++++++ 18 files changed, 637 insertions(+), 43 deletions(-) create mode 100644 framework/util/linear_hashmap.h create mode 100644 framework/util/test/test_linear_hashmap.cpp diff --git a/android/framework/util/CMakeLists.txt b/android/framework/util/CMakeLists.txt index 9addaa934f..b840d13f8a 100644 --- a/android/framework/util/CMakeLists.txt +++ b/android/framework/util/CMakeLists.txt @@ -21,6 +21,7 @@ target_sources(gfxrecon_util ${GFXRECON_SOURCE_DIR}/framework/util/json_util.cpp ${GFXRECON_SOURCE_DIR}/framework/util/keyboard.h ${GFXRECON_SOURCE_DIR}/framework/util/keyboard.cpp + ${GFXRECON_SOURCE_DIR}/framework/util/linear_hashmap.h ${GFXRECON_SOURCE_DIR}/framework/util/logging.h ${GFXRECON_SOURCE_DIR}/framework/util/logging.cpp ${GFXRECON_SOURCE_DIR}/framework/util/lz4_compressor.h diff --git a/framework/decode/vulkan_object_info.h b/framework/decode/vulkan_object_info.h index 605f3d5231..4b537cf7df 100644 --- a/framework/decode/vulkan_object_info.h +++ b/framework/decode/vulkan_object_info.h @@ -622,6 +622,7 @@ struct VulkanCommandBufferInfo : public VulkanPoolObjectInfo bool is_frame_boundary{ false }; std::vector frame_buffer_ids; std::unordered_map image_layout_barriers; + format::HandleId bound_pipeline_id = format::kNullHandleId; }; struct VulkanRenderPassInfo : public 
VulkanObjectInfo diff --git a/framework/decode/vulkan_replay_consumer_base.cpp b/framework/decode/vulkan_replay_consumer_base.cpp index d01a785efd..7389820051 100644 --- a/framework/decode/vulkan_replay_consumer_base.cpp +++ b/framework/decode/vulkan_replay_consumer_base.cpp @@ -48,6 +48,7 @@ #include "util/hash.h" #include "util/platform.h" #include "util/logging.h" +#include "util/linear_hashmap.h" #include "spirv_reflect.h" @@ -7841,6 +7842,7 @@ VkResult VulkanReplayConsumerBase::OverrideCreateRayTracingPipelinesKHR( &pPipelines->GetPointer()[createInfoCount]); } + // NOTE: this is basically never true and does not look like it's going to change soon if (device_info->property_feature_info.feature_rayTracingPipelineShaderGroupHandleCaptureReplay) { // Modify pipeline create infos with capture replay flag and data. @@ -7956,10 +7958,6 @@ VkResult VulkanReplayConsumerBase::OverrideCreateRayTracingPipelinesKHR( } else { - GFXRECON_LOG_ERROR_ONCE("The replay used vkCreateRayTracingPipelinesKHR, which may require the " - "rayTracingPipelineShaderGroupHandleCaptureReplay feature for accurate capture and " - "replay. The replay device does not support this feature, so replay may fail."); - if (omitted_pipeline_cache_data_) { AllowCompileDuringPipelineCreation(createInfoCount, @@ -8187,14 +8185,6 @@ VulkanReplayConsumerBase::OverrideGetRayTracingShaderGroupHandlesKHR(PFN_vkGetRa assert((device_info != nullptr) && (pipeline_info != nullptr) && (pData != nullptr) && (pData->GetOutputPointer() != nullptr)); - if (!device_info->property_feature_info.feature_rayTracingPipelineShaderGroupHandleCaptureReplay) - { - GFXRECON_LOG_WARNING_ONCE( - "The captured application used vkGetRayTracingShaderGroupHandlesKHR, which may require the " - "rayTracingPipelineShaderGroupHandleCaptureReplay feature for accurate capture and replay. 
The replay " - "device does not support this feature, so replay may fail."); - } - VkDevice device = device_info->handle; VkPipeline pipeline = pipeline_info->handle; uint8_t* output_data = pData->GetOutputPointer(); @@ -8263,8 +8253,10 @@ VkResult VulkanReplayConsumerBase::OverrideGetAndroidHardwareBufferPropertiesAND void VulkanReplayConsumerBase::ClearCommandBufferInfo(VulkanCommandBufferInfo* command_buffer_info) { + GFXRECON_ASSERT(command_buffer_info != nullptr) command_buffer_info->is_frame_boundary = false; command_buffer_info->frame_buffer_ids.clear(); + command_buffer_info->bound_pipeline_id = format::kNullHandleId; } VkResult VulkanReplayConsumerBase::OverrideBeginCommandBuffer( @@ -8388,7 +8380,26 @@ void VulkanReplayConsumerBase::OverrideCmdInsertDebugUtilsLabelEXT( { command_buffer_info->is_frame_boundary = true; } -}; +} + +void VulkanReplayConsumerBase::OverrideCmdBindPipeline(PFN_vkCmdBindPipeline func, + VulkanCommandBufferInfo* command_buffer_info, + VkPipelineBindPoint pipelineBindPoint, + VulkanPipelineInfo* pipeline_info) +{ + VkCommandBuffer command_buffer = VK_NULL_HANDLE; + VkPipeline pipeline = VK_NULL_HANDLE; + + if (command_buffer_info != nullptr && pipeline_info != nullptr) + { + command_buffer = command_buffer_info->handle; + pipeline = pipeline_info->handle; + + // keep track of currently bound pipeline + command_buffer_info->bound_pipeline_id = pipeline_info->capture_id; + } + func(command_buffer, pipelineBindPoint, pipeline); +} void VulkanReplayConsumerBase::OverrideCmdBeginRenderPass( PFN_vkCmdBeginRenderPass func, @@ -8504,7 +8515,7 @@ void VulkanReplayConsumerBase::OverrideCmdTraceRaysKHR( // identify buffer(s) by their device-address const VulkanDeviceInfo* device_info = GetObjectInfoTable().GetVkDeviceInfo(command_buffer_info->parent_id); - const auto& address_tracker = GetDeviceAddressTracker(device_info->handle); + const auto& address_tracker = GetDeviceAddressTracker(device_info->handle); auto address_remap = [&address_tracker](VkStridedDeviceAddressRegionKHR* address_region) { if (address_region->size > 0) @@ -8516,7 +8527,7 @@ void VulkanReplayConsumerBase::OverrideCmdTraceRaysKHR( { uint64_t offset = address_region->deviceAddress - buffer_info->capture_address; - // in-place address-remap via const-cast + // in-place address-remap address_region->deviceAddress = buffer_info->replay_address + offset; } else @@ -8526,26 +8537,65 @@ void VulkanReplayConsumerBase::OverrideCmdTraceRaysKHR( } } }; - // in-place remap: capture-addresses -> replay-addresses - address_remap(in_pRaygenShaderBindingTable); - address_remap(in_pMissShaderBindingTable); - address_remap(in_pHitShaderBindingTable); - address_remap(in_pCallableShaderBindingTable); + + auto bound_pipeline = GetObjectInfoTable().GetVkPipelineInfo(command_buffer_info->bound_pipeline_id); + GFXRECON_ASSERT(bound_pipeline != nullptr) + + // NOTE: expect this map to be populated here, but not for older captures using trimming. 
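+    // Maps each capture-time ('old') shader-group-handle to the handle returned on the replay device ('new');
+    // a mismatch means the group handles baked into the captured shader-binding-table are stale on replay.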
+ auto& shader_group_handles = bound_pipeline->shader_group_handle_map; + + // figure out if the captured group-handles are valid for replay + bool valid_group_handles = !shader_group_handles.empty(); + bool valid_sbt_alignment = true; const VulkanPhysicalDeviceInfo* physical_device_info = GetObjectInfoTable().GetVkPhysicalDeviceInfo(device_info->parent_id); - if (physical_device_info && physical_device_info->replay_device_info->raytracing_properties) + if (physical_device_info != nullptr && physical_device_info->replay_device_info->raytracing_properties) { const auto& replay_props = *physical_device_info->replay_device_info->raytracing_properties; + if (physical_device_info->shaderGroupHandleSize != replay_props.shaderGroupHandleSize || physical_device_info->shaderGroupHandleAlignment != replay_props.shaderGroupHandleAlignment || physical_device_info->shaderGroupBaseAlignment != replay_props.shaderGroupBaseAlignment) { - // TODO: binding-table re-assembly + valid_sbt_alignment = false; + } + } + + for (const auto& [lhs, rhs] : shader_group_handles) + { + if (lhs != rhs) + { + valid_group_handles = false; + break; + } + } + + if (!(valid_group_handles && valid_sbt_alignment)) + { + if (!valid_sbt_alignment) + { // TODO: remove TODO/warning when issue #1526 is solved - GFXRECON_LOG_WARNING_ONCE( - "OverrideCmdTraceRaysKHR: mismatching shader-binding-table size or alignments") + GFXRECON_LOG_WARNING_ONCE("OverrideCmdTraceRaysKHR: invalid shader-binding-table (handle-size and/or " + "alignments mismatch) -> TODO: run SBT re-assembly"); + + // TODO: create shadow-SBT-buffer, remap addresses to that + } + else + { + // in-place remap: capture-addresses -> replay-addresses + address_remap(in_pRaygenShaderBindingTable); + address_remap(in_pMissShaderBindingTable); + address_remap(in_pHitShaderBindingTable); + address_remap(in_pCallableShaderBindingTable); + } + + // TODO: run sbt-handle replacer. 
(create linear hashmap (x), [create shadow-buffer], run compute-shader) + util::linear_hashmap hashmap; + for (const auto& [lhs, rhs] : shader_group_handles) + { + hashmap.put(lhs, rhs); } } diff --git a/framework/decode/vulkan_replay_consumer_base.h b/framework/decode/vulkan_replay_consumer_base.h index 14098b3c0d..eca6c61c8c 100644 --- a/framework/decode/vulkan_replay_consumer_base.h +++ b/framework/decode/vulkan_replay_consumer_base.h @@ -1269,6 +1269,11 @@ class VulkanReplayConsumerBase : public VulkanConsumer uint64_t presentid, uint64_t timeout); + void OverrideCmdBindPipeline(PFN_vkCmdBindPipeline func, + VulkanCommandBufferInfo* command_buffer_info, + VkPipelineBindPoint pipelineBindPoint, + VulkanPipelineInfo* pipeline_info); + void OverrideCmdBeginRenderPass(PFN_vkCmdBeginRenderPass func, VulkanCommandBufferInfo* command_buffer_info, StructPointerDecoder* render_pass_begin_info_decoder, diff --git a/framework/encode/vulkan_capture_manager.cpp b/framework/encode/vulkan_capture_manager.cpp index 04640f9dbe..8be1a4e8c1 100644 --- a/framework/encode/vulkan_capture_manager.cpp +++ b/framework/encode/vulkan_capture_manager.cpp @@ -1405,12 +1405,14 @@ VulkanCaptureManager::OverrideCreateRayTracingPipelinesKHR(VkDevice { auto pipeline_wrapper = vulkan_wrappers::GetWrapper(pPipelines[i]); + // We need to set device_id here because some hardware may not have the feature + // rayTracingPipelineShaderGroupHandleCaptureReplay so the device_id cannot be set by + // VulkanStateTracker::TrackRayTracingShaderGroupHandles + pipeline_wrapper->device_id = vulkan_wrappers::GetWrappedId(device); + pipeline_wrapper->num_shader_group_handles = pCreateInfos[i].groupCount; + if (deferred_operation_wrapper) { - // We need to set device_id here because some hardware may not have the feature - // rayTracingPipelineShaderGroupHandleCaptureReplay so the device_id cannot be set by - // VulkanStateTracker::TrackRayTracingShaderGroupHandles - pipeline_wrapper->device_id = vulkan_wrappers::GetWrappedId(device); pipeline_wrapper->deferred_operation.handle_id = deferred_operation_wrapper->handle_id; pipeline_wrapper->deferred_operation.create_call_id = deferred_operation_wrapper->create_call_id; pipeline_wrapper->deferred_operation.create_parameters = deferred_operation_wrapper->create_parameters; diff --git a/framework/encode/vulkan_handle_wrappers.h b/framework/encode/vulkan_handle_wrappers.h index 2f4d60c09e..2d49eecd43 100644 --- a/framework/encode/vulkan_handle_wrappers.h +++ b/framework/encode/vulkan_handle_wrappers.h @@ -380,6 +380,7 @@ struct PipelineWrapper : public HandleWrapper // Ray tracing pipeline's shader group handle data format::HandleId device_id{ format::kNullHandleId }; std::vector shader_group_handle_data; + uint32_t num_shader_group_handles{ 0 }; vulkan_state_info::CreateDependencyInfo deferred_operation; // TODO: Base pipeline diff --git a/framework/encode/vulkan_state_writer.cpp b/framework/encode/vulkan_state_writer.cpp index fd2356b9b7..66d813bba5 100644 --- a/framework/encode/vulkan_state_writer.cpp +++ b/framework/encode/vulkan_state_writer.cpp @@ -725,6 +725,7 @@ void VulkanStateWriter::WritePipelineState(const VulkanStateTable& state_table) wrapper->handle_id, wrapper->shader_group_handle_data.size(), wrapper->shader_group_handle_data.data()); + } if (processed_ray_tracing_pipelines_khr.find(wrapper->create_parameters.get()) == @@ -860,6 +861,37 @@ void VulkanStateWriter::WritePipelineState(const VulkanStateTable& state_table) { 
DestroyTemporaryDeviceObject(format::ApiCall_vkDestroyPipelineLayout, entry.first, entry.second); } + + WriteRayTracingShaderGroupHandlesState(state_table); +} + +void VulkanStateWriter::WriteRayTracingShaderGroupHandlesState(const VulkanStateTable& state_table) +{ + state_table.VisitWrappers([&](const vulkan_wrappers::PipelineWrapper* wrapper) { + assert(wrapper != nullptr); + + if (wrapper->create_call_id == format::ApiCall_vkCreateRayTracingPipelinesKHR) + { + auto device_wrapper = state_table.GetDeviceWrapper(wrapper->device_id); + uint32_t data_size = device_wrapper->property_feature_info.property_shaderGroupHandleSize * + wrapper->num_shader_group_handles; + std::vector data(data_size); + + VkResult result = device_wrapper->layer_table.GetRayTracingShaderGroupHandlesKHR( + device_wrapper->handle, wrapper->handle, 0, wrapper->num_shader_group_handles, data_size, data.data()); + + parameter_stream_.Clear(); + encoder_.EncodeHandleIdValue(wrapper->device_id); + encoder_.EncodeHandleIdValue(wrapper->handle_id); + encoder_.EncodeUInt32Value(0); // firstGroup + encoder_.EncodeUInt32Value(wrapper->num_shader_group_handles); // groupCount + encoder_.EncodeSizeTValue(data_size); // dataSize + encoder_.EncodeVoidArray(data.data(), data_size); // data + encoder_.EncodeEnumValue(result); // result + WriteFunctionCall(format::ApiCall_vkGetRayTracingShaderGroupHandlesKHR, ¶meter_stream_); + parameter_stream_.Clear(); + } + }); } void VulkanStateWriter::WriteDescriptorSetState(const VulkanStateTable& state_table) diff --git a/framework/encode/vulkan_state_writer.h b/framework/encode/vulkan_state_writer.h index 4747ee67a1..131f938182 100644 --- a/framework/encode/vulkan_state_writer.h +++ b/framework/encode/vulkan_state_writer.h @@ -154,6 +154,8 @@ class VulkanStateWriter void WriteRayTracingPipelinePropertiesState(const VulkanStateTable& state_table); + void WriteRayTracingShaderGroupHandlesState(const VulkanStateTable& state_table); + void WriteAccelerationStructureKHRState(const VulkanStateTable& state_table); void WriteDeferredOperationJoinCommand(format::HandleId device_id, format::HandleId deferred_operation_id); diff --git a/framework/generated/generated_vulkan_replay_consumer.cpp b/framework/generated/generated_vulkan_replay_consumer.cpp index a5d8e103fc..4f776e73b7 100644 --- a/framework/generated/generated_vulkan_replay_consumer.cpp +++ b/framework/generated/generated_vulkan_replay_consumer.cpp @@ -1462,14 +1462,14 @@ void VulkanReplayConsumer::Process_vkCmdBindPipeline( VkPipelineBindPoint pipelineBindPoint, format::HandleId pipeline) { - VkCommandBuffer in_commandBuffer = MapHandle(commandBuffer, &CommonObjectInfoTable::GetVkCommandBufferInfo); - VkPipeline in_pipeline = MapHandle(pipeline, &CommonObjectInfoTable::GetVkPipelineInfo); + auto in_commandBuffer = GetObjectInfoTable().GetVkCommandBufferInfo(commandBuffer); + auto in_pipeline = GetObjectInfoTable().GetVkPipelineInfo(pipeline); - GetDeviceTable(in_commandBuffer)->CmdBindPipeline(in_commandBuffer, pipelineBindPoint, in_pipeline); + OverrideCmdBindPipeline(GetDeviceTable(in_commandBuffer->handle)->CmdBindPipeline, in_commandBuffer, pipelineBindPoint, in_pipeline); if (options_.dumping_resources) { - resource_dumper_->Process_vkCmdBindPipeline(call_info, GetDeviceTable(in_commandBuffer)->CmdBindPipeline, in_commandBuffer, pipelineBindPoint, GetObjectInfoTable().GetVkPipelineInfo(pipeline)); + resource_dumper_->Process_vkCmdBindPipeline(call_info, GetDeviceTable(in_commandBuffer->handle)->CmdBindPipeline, in_commandBuffer->handle, 
pipelineBindPoint, in_pipeline); } } diff --git a/framework/generated/vulkan_generators/replay_overrides.json b/framework/generated/vulkan_generators/replay_overrides.json index 54ffafdee8..a0da0740f1 100644 --- a/framework/generated/vulkan_generators/replay_overrides.json +++ b/framework/generated/vulkan_generators/replay_overrides.json @@ -127,6 +127,7 @@ "vkDestroyAccelerationStructureKHR": "OverrideDestroyAccelerationStructureKHR", "vkCmdBuildAccelerationStructuresKHR": "OverrideCmdBuildAccelerationStructuresKHR", "vkCmdCopyAccelerationStructureKHR" : "OverrideCmdCopyAccelerationStructureKHR", - "vkCmdWriteAccelerationStructuresPropertiesKHR" : "OverrideCmdWriteAccelerationStructuresPropertiesKHR" + "vkCmdWriteAccelerationStructuresPropertiesKHR" : "OverrideCmdWriteAccelerationStructuresPropertiesKHR", + "vkCmdBindPipeline" : "OverrideCmdBindPipeline" } } diff --git a/framework/generated/vulkan_generators/vulkan_replay_consumer_body_generator.py b/framework/generated/vulkan_generators/vulkan_replay_consumer_body_generator.py index ad1bbaee03..f4a21b1582 100644 --- a/framework/generated/vulkan_generators/vulkan_replay_consumer_body_generator.py +++ b/framework/generated/vulkan_generators/vulkan_replay_consumer_body_generator.py @@ -370,6 +370,8 @@ def make_consumer_func_body(self, return_type, name, values): elif self.is_handle(val.base_type): if val.is_pointer: dump_resource_arglist += val.name + '->GetHandlePointer()' + elif val.base_type in ["VkPipeline"]: + dump_resource_arglist += 'in_' + val.name else: dump_resource_arglist += 'in_' + val.name + '->handle' else: diff --git a/framework/graphics/vulkan_device_util.cpp b/framework/graphics/vulkan_device_util.cpp index 160dd44d97..be77b31f3f 100644 --- a/framework/graphics/vulkan_device_util.cpp +++ b/framework/graphics/vulkan_device_util.cpp @@ -199,13 +199,16 @@ VulkanDeviceUtil::EnableRequiredPhysicalDeviceFeatures(uint32_t result.feature_rayTracingPipelineShaderGroupHandleCaptureReplay = rt_pipeline_features->rayTracingPipelineShaderGroupHandleCaptureReplay; + + // retrieve raytracing-pipeline-properties + VkPhysicalDeviceRayTracingPipelinePropertiesKHR rt_properties{ + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR, nullptr + }; + GetPhysicalDeviceProperties(instance_api_version, instance_table, physical_device, rt_properties); + result.property_shaderGroupHandleSize = rt_properties.shaderGroupHandleSize; + if (result.feature_rayTracingPipelineShaderGroupHandleCaptureReplay) { - VkPhysicalDeviceRayTracingPipelinePropertiesKHR rt_properties{ - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR, nullptr - }; - GetPhysicalDeviceProperties(instance_api_version, instance_table, physical_device, rt_properties); - result.property_shaderGroupHandleCaptureReplaySize = rt_properties.shaderGroupHandleCaptureReplaySize; } diff --git a/framework/graphics/vulkan_device_util.h b/framework/graphics/vulkan_device_util.h index e8a87a1708..3472d253c9 100644 --- a/framework/graphics/vulkan_device_util.h +++ b/framework/graphics/vulkan_device_util.h @@ -39,6 +39,7 @@ GFXRECON_BEGIN_NAMESPACE(graphics) struct VulkanDevicePropertyFeatureInfo { + uint32_t property_shaderGroupHandleSize{ 0 }; uint32_t property_shaderGroupHandleCaptureReplaySize{ 0 }; VkBool32 feature_bufferDeviceAddressCaptureReplay{ VK_FALSE }; diff --git a/framework/graphics/vulkan_shader_group_handle.h b/framework/graphics/vulkan_shader_group_handle.h index 07aeb7c532..7cf6220ea3 100644 --- a/framework/graphics/vulkan_shader_group_handle.h +++ 
b/framework/graphics/vulkan_shader_group_handle.h @@ -34,21 +34,31 @@ struct shader_group_handle_t { static constexpr uint32_t MAX_HANDLE_SIZE = 32; uint8_t data[MAX_HANDLE_SIZE] = {}; - uint32_t size = 0; shader_group_handle_t() = default; shader_group_handle_t(const uint8_t* in_data, uint32_t in_size) { GFXRECON_ASSERT(in_size <= MAX_HANDLE_SIZE); + if (in_size > MAX_HANDLE_SIZE) + { + GFXRECON_LOG_WARNING_ONCE("capture shader-group-handle size-overflow, replay might not work correctly") + } memcpy(data, in_data, std::min(in_size, MAX_HANDLE_SIZE)); - size = in_size; } - inline bool operator==(const shader_group_handle_t& other) const + inline constexpr bool operator==(const shader_group_handle_t& other) const { - return size == other.size && memcmp(data, other.data, size) == 0; + for (uint32_t i = 0; i < MAX_HANDLE_SIZE; ++i) + { + if (data[i] != other.data[i]) + { + return false; + } + } + return true; } + inline constexpr bool operator!=(const shader_group_handle_t& other) const { return !(*this == other); } }; GFXRECON_END_NAMESPACE(graphics) @@ -62,7 +72,8 @@ struct hash { inline size_t operator()(const gfxrecon::graphics::shader_group_handle_t& handle) const { - return gfxrecon::util::hash::hash_range(handle.data, handle.data + handle.size); + return gfxrecon::util::hash::hash_range( + handle.data, handle.data + gfxrecon::graphics::shader_group_handle_t::MAX_HANDLE_SIZE); } }; diff --git a/framework/util/CMakeLists.txt b/framework/util/CMakeLists.txt index 0137b22089..5289d49de4 100644 --- a/framework/util/CMakeLists.txt +++ b/framework/util/CMakeLists.txt @@ -51,6 +51,7 @@ target_sources(gfxrecon_util ${CMAKE_CURRENT_LIST_DIR}/json_util.cpp ${CMAKE_CURRENT_LIST_DIR}/keyboard.h ${CMAKE_CURRENT_LIST_DIR}/keyboard.cpp + ${CMAKE_CURRENT_LIST_DIR}/linear_hashmap.h ${CMAKE_CURRENT_LIST_DIR}/logging.h ${CMAKE_CURRENT_LIST_DIR}/logging.cpp ${CMAKE_CURRENT_LIST_DIR}/lz4_compressor.h @@ -205,6 +206,7 @@ if (${RUN_TESTS}) add_executable(gfxrecon_util_test "") target_sources(gfxrecon_util_test PRIVATE ${CMAKE_CURRENT_LIST_DIR}/test/main.cpp + ${CMAKE_CURRENT_LIST_DIR}/test/test_linear_hashmap.cpp ${CMAKE_CURRENT_LIST_DIR}/../../tools/platform_debug_helper.cpp $<$:${CMAKE_CURRENT_LIST_DIR}/test/dx_pointers.h> $<$:${CMAKE_CURRENT_LIST_DIR}/test/dx12_utils.cpp> diff --git a/framework/util/hash.h b/framework/util/hash.h index f541774ed6..0c5352982c 100644 --- a/framework/util/hash.h +++ b/framework/util/hash.h @@ -19,6 +19,10 @@ ** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING ** FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER ** DEALINGS IN THE SOFTWARE. +** +//----------------------------------------------------------------------------- +// MurmurHash3 was written by Austin Appleby, and is placed in the public +// domain. The author hereby disclaims copyright to this source code. 
*/ #pragma once @@ -98,6 +102,58 @@ std::size_t hash_range(It first, It last) return seed; } +// https://en.wikipedia.org/wiki/MurmurHash +inline uint32_t murmur_32_scramble(uint32_t k) +{ + k *= 0xcc9e2d51; + k = (k << 15) | (k >> 17); + k *= 0x1b873593; + return k; +} + +// https://en.wikipedia.org/wiki/MurmurHash +template +static inline uint32_t murmur3_32(const K& key, uint32_t seed) +{ + constexpr uint32_t num_hashes = sizeof(K) / sizeof(uint32_t); + constexpr uint32_t num_excess_bytes = sizeof(K) % sizeof(uint32_t); + + uint32_t h = seed; + + if constexpr (num_hashes > 0u) + { + auto ptr = reinterpret_cast(&key), end = ptr + num_hashes; + + for (; ptr < end; ++ptr) + { + h ^= murmur_32_scramble(*ptr); + h = (h << 13) | (h >> 19); + h = h * 5 + 0xe6546b64; + } + } + + if constexpr (num_excess_bytes > 0u) + { + auto end_u8 = reinterpret_cast(&key) + sizeof(uint32_t) * num_hashes; + uint32_t k = 0; + for (uint32_t i = num_excess_bytes; i; i--) + { + k <<= 8; + k |= end_u8[i - 1]; + } + h ^= murmur_32_scramble(k); + } + + // finalize + h ^= sizeof(K); + h ^= h >> 16; + h *= 0x85ebca6b; + h ^= h >> 13; + h *= 0xc2b2ae35; + h ^= h >> 16; + return h; +} + GFXRECON_END_NAMESPACE(hash) GFXRECON_END_NAMESPACE(util) GFXRECON_END_NAMESPACE(gfxrecon) diff --git a/framework/util/linear_hashmap.h b/framework/util/linear_hashmap.h new file mode 100644 index 0000000000..3f8be53e9e --- /dev/null +++ b/framework/util/linear_hashmap.h @@ -0,0 +1,285 @@ +/* +** Copyright (c) 2024 LunarG, Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and associated documentation files (the "Software"), +** to deal in the Software without restriction, including without limitation +** the rights to use, copy, modify, merge, publish, distribute, sublicense, +** and/or sell copies of the Software, and to permit persons to whom the +** Software is furnished to do so, subject to the following conditions: +** +** The above copyright notice and this permission notice shall be included in +** all copies or substantial portions of the Software. +** +** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +** IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +** AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +** FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +** DEALINGS IN THE SOFTWARE. +*/ + +#ifndef GFXRECONSTRUCT_UTIL_LINEAR_HASHMAP_H +#define GFXRECONSTRUCT_UTIL_LINEAR_HASHMAP_H + +#include "util/defines.h" +#include "util/hash.h" +#include +#include +#include + +GFXRECON_BEGIN_NAMESPACE(gfxrecon) +GFXRECON_BEGIN_NAMESPACE(util) + +inline constexpr bool is_pow_2(uint64_t v) +{ + return !(v & (v - 1)); +} + +inline constexpr uint64_t next_pow_2(uint64_t v) +{ + if (is_pow_2(v)) + { + return v; + } + v--; + v |= v >> 1U; + v |= v >> 2U; + v |= v >> 4U; + v |= v >> 8U; + v |= v >> 16U; + v |= v >> 32U; + v++; + return v; +} + +/** + * @brief linear_hashmap is a hashmap using open addressing with linear probing. + * it can be used for POD key/value pairs and operates on a single array without chaining. + * + * goal is to offer an easy and performant hashmap-storage that can be populated on CPU- and queried on GPU-pipelines. 
+ * + * @tparam K + * @tparam V + */ +template +class linear_hashmap +{ + public: + using key_t = K; + using value_t = V; + using hash32_fn = std::function; + static_assert(std::is_default_constructible_v, "key_t not default-constructible"); + static_assert(key_t() == key_t(), "key_t not comparable"); + + linear_hashmap() = default; + linear_hashmap(const linear_hashmap&) = delete; + linear_hashmap(linear_hashmap& other) : linear_hashmap() { swap(*this, other); }; + linear_hashmap& operator=(linear_hashmap other) + { + swap(*this, other); + return *this; + } + + explicit linear_hashmap(uint64_t min_capacity) : + m_capacity(next_pow_2(min_capacity)), m_storage(std::make_unique(m_capacity)) + { + clear(); + } + + [[nodiscard]] inline size_t size() const { return m_num_elements; } + + [[nodiscard]] inline size_t capacity() const { return m_capacity; } + + [[nodiscard]] inline bool empty() const { return size() == 0; } + + inline void clear() + { + m_num_elements = 0; + storage_item_t *ptr = m_storage.get(), *end = ptr + m_capacity; + for (; ptr != end; ++ptr) + { + ptr->key = key_t(); + ptr->value = std::optional(); + } + } + + inline uint32_t put(const key_t& key, const value_t& value) + { + check_load_factor(); + return internal_put(key, value); + } + + [[nodiscard]] std::optional get(const key_t& key) const + { + if (!m_capacity) + { + return {}; + } + + for (uint32_t idx = m_hash_fn(key);; idx++) + { + idx &= m_capacity - 1; + auto& item = m_storage[idx]; + if (item.key == key_t()) + { + return {}; + } + else if (key == item.key) + { + if (item.value) + { + return item.value; + } + } + } + } + + void remove(const key_t& key) + { + if (!m_capacity) + { + return; + } + + for (uint32_t idx = m_hash_fn(key);; idx++) + { + idx &= m_capacity - 1; + auto& item = m_storage[idx]; + if (item.key == key_t()) + { + return; + } + else if (key == item.key && item.value) + { + item.value = {}; + m_num_elements--; + return; + } + } + } + + [[nodiscard]] inline bool contains(const key_t& key) const { return get(key) != std::nullopt; } + + size_t get_storage(void* dst) const + { + struct output_item_t + { + key_t key = {}; + value_t value = {}; + }; + + if (dst) + { + auto output_ptr = reinterpret_cast(dst); + storage_item_t *item = m_storage.get(), *end = item + m_capacity; + for (; item != end; ++item, ++output_ptr) + { + if (item->key != key_t()) + { + output_ptr->key = item->key; + output_ptr->value = item->value ? 
*item->value : value_t(); + } + else + { + *output_ptr = {}; + } + } + } + return sizeof(output_item_t) * m_capacity; + } + + void reserve(size_t new_capacity) + { + auto new_linear_hashmap = linear_hashmap(new_capacity); + storage_item_t *ptr = m_storage.get(), *end = ptr + m_capacity; + for (; ptr != end; ++ptr) + { + if (ptr->key != key_t()) + { + if (ptr->value) + { + new_linear_hashmap.put(ptr->key, *ptr->value); + } + } + } + swap(*this, new_linear_hashmap); + } + + [[nodiscard]] float load_factor() const { return static_cast(m_num_elements) / m_capacity; } + + [[nodiscard]] float max_load_factor() const { return m_max_load_factor; } + + void max_load_factor(float load_factor) + { + m_max_load_factor = std::clamp(load_factor, 0.01f, 1.f); + check_load_factor(); + } + + friend void swap(linear_hashmap& lhs, linear_hashmap& rhs) + { + std::swap(lhs.m_capacity, rhs.m_capacity); + std::swap(lhs.m_num_elements, rhs.m_num_elements); + std::swap(lhs.m_storage, rhs.m_storage); + std::swap(lhs.m_hash_fn, rhs.m_hash_fn); + std::swap(lhs.m_max_load_factor, rhs.m_max_load_factor); + std::swap(lhs.m_grow_factor, rhs.m_grow_factor); + } + + private: + struct storage_item_t + { + key_t key; + std::optional value; + }; + + inline void check_load_factor() + { + if (m_num_elements >= m_capacity * m_max_load_factor) + { + reserve(std::max(32, static_cast(m_grow_factor * m_capacity))); + } + } + + inline uint32_t internal_put(const key_t key, const value_t& value) + { + uint32_t probe_length = 0; + + for (uint64_t idx = m_hash_fn(key);; idx++, probe_length++) + { + idx &= m_capacity - 1; + auto& item = m_storage[idx]; + + // load previous key + key_t probed_key = item.key; + + if (probed_key != key) + { + // hit another valid entry, keep probing + if (probed_key != key_t() && item.value) + { + continue; + } + item.key = key; + m_num_elements++; + } + item.value = value; + return probe_length; + } + } + + uint64_t m_capacity = 0; + uint64_t m_num_elements = 0; + std::unique_ptr m_storage; + hash32_fn m_hash_fn = std::bind(hash::murmur3_32, std::placeholders::_1, 0); + + // reasonably low load-factor to keep average probe-lengths low + float m_max_load_factor = 0.5f; + float m_grow_factor = 2.f; +}; + +GFXRECON_END_NAMESPACE(util) +GFXRECON_END_NAMESPACE(gfxrecon) + +#endif // GFXRECONSTRUCT_UTIL_LINEAR_HASHMAP_H diff --git a/framework/util/test/test_linear_hashmap.cpp b/framework/util/test/test_linear_hashmap.cpp new file mode 100644 index 0000000000..593e342e65 --- /dev/null +++ b/framework/util/test/test_linear_hashmap.cpp @@ -0,0 +1,139 @@ +/* +** Copyright (c) 2024 LunarG, Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and associated documentation files (the "Software"), +** to deal in the Software without restriction, including without limitation +** the rights to use, copy, modify, merge, publish, distribute, sublicense, +** and/or sell copies of the Software, and to permit persons to whom the +** Software is furnished to do so, subject to the following conditions: +** +** The above copyright notice and this permission notice shall be included in +** all copies or substantial portions of the Software. +** +** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +** IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +** AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +** FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +** DEALINGS IN THE SOFTWARE. +*/ + +#include +#include "util/linear_hashmap.h" +#include + +TEST_CASE("linear_hashmap - create empty map", "[]") +{ + gfxrecon::util::linear_hashmap hashmap; + REQUIRE(hashmap.empty()); + hashmap.clear(); + REQUIRE(hashmap.capacity() == 0); + REQUIRE(hashmap.get_storage(nullptr) == 0); +} + +TEST_CASE("linear_hashmap - basic usage", "[]") +{ + constexpr uint32_t test_capacity = 100; + gfxrecon::util::linear_hashmap hashmap(test_capacity); + REQUIRE(hashmap.empty()); + REQUIRE(hashmap.get_storage(nullptr) > 0); + + // capacity will be rounded to next pow2 + REQUIRE(hashmap.capacity() >= test_capacity); + REQUIRE(gfxrecon::util::is_pow_2(hashmap.capacity())); + + REQUIRE_FALSE(hashmap.contains(0)); + REQUIRE_FALSE(hashmap.contains(13)); + REQUIRE_FALSE(hashmap.contains(42)); + + hashmap.put(69, 99); + hashmap.put(13, 12); + hashmap.put(8, 15); + REQUIRE(hashmap.size() == 3); + + hashmap.remove(8); + REQUIRE(hashmap.size() == 2); + REQUIRE_FALSE(hashmap.contains(8)); + + REQUIRE(hashmap.contains(69)); + REQUIRE(hashmap.get(69) == 99); + REQUIRE(hashmap.contains(13)); + REQUIRE(hashmap.get(13) == 12); + + auto storage = std::make_unique(hashmap.get_storage(nullptr)); + hashmap.get_storage(storage.get()); +} + +TEST_CASE("linear_hashmap - use a custom_key", "[]") +{ + // custom 32-byte key + struct custom_key_t + { + int v[8]{}; + constexpr bool operator==(const custom_key_t& other) const + { + for (uint32_t i = 0; i < 8; ++i) + { + if (v[i] != other.v[i]) + { + return false; + } + } + return true; + } + constexpr bool operator!=(const custom_key_t& other) const { return !(other == *this); } + }; + constexpr uint32_t test_capacity = 100; + auto hashmap = gfxrecon::util::linear_hashmap(test_capacity); + + custom_key_t k1{ { 1, 2, 3, 4, 5, 6, 7, 8 } }; + hashmap.put(k1, 69); + REQUIRE(hashmap.contains(k1)); + REQUIRE_FALSE(hashmap.contains(custom_key_t())); +} + +TEST_CASE("linear_hashmap - reserve", "[]") +{ + gfxrecon::util::linear_hashmap hashmap; + + // fix by resizing + hashmap.reserve(17); + REQUIRE(hashmap.empty()); + hashmap.put(13, 12); + REQUIRE(hashmap.contains(13)); + + // empty / no capacity specified -> triggers internal resize + hashmap = {}; + hashmap.put(13, 12); + REQUIRE(hashmap.contains(13)); +} + +TEST_CASE("linear_hashmap - probe_length", "[]") +{ + gfxrecon::util::linear_hashmap hashmap; + + // default load_factor is 0.5 + REQUIRE(hashmap.max_load_factor() == 0.5f); + + // test a load-factor of 0.25 + hashmap.max_load_factor(0.25f); + + constexpr uint32_t test_capacity = 512; + constexpr uint32_t num_insertions = 128; + hashmap.reserve(test_capacity); + + float probe_length_sum = 0.f; + for (uint32_t i = 0; i < num_insertions; i++) + { + probe_length_sum += static_cast(hashmap.put(i, 69)); + } + float avg_probe_length = probe_length_sum / num_insertions; + + // for a load-factor of 0.25, we expect very short probe-lengths + constexpr float expected_max_avg_probe_length = 0.15f; + REQUIRE(avg_probe_length <= expected_max_avg_probe_length); + + REQUIRE(hashmap.load_factor() <= 0.25f); +} \ No newline at end of file From cc0dc4075d42265a7d1b90e2de300a0e005254db Mon Sep 17 00:00:00 2001 From: bradgrantham-lunarg <50641407+bradgrantham-lunarg@users.noreply.github.com> Date: Tue, 5 Nov 2024 11:44:23 -0800 
Subject: [PATCH 65/70] Also run unit tests in GitHub CI (#1867) --- .github/workflows/ci_build.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci_build.yml b/.github/workflows/ci_build.yml index 10d2530ea6..567dfc874c 100644 --- a/.github/workflows/ci_build.yml +++ b/.github/workflows/ci_build.yml @@ -49,11 +49,11 @@ jobs: then clang-format --version # A check for version 9 should be added. git fetch origin ${{ github.base_ref }} # Fetch target branch to FETCH_HEAD for code style check. - python3 scripts/build.py --skip-tests --config ${{ matrix.config.type }} --check-code-style-base FETCH_HEAD --parallel 0 + python3 scripts/build.py --config ${{ matrix.config.type }} --check-code-style-base FETCH_HEAD --parallel 0 python3 framework/generated/generate_vulkan.py # check generated code isn't out of date git diff --exit-code else - python3 scripts/build.py --skip-tests --config ${{ matrix.config.type }} --skip-check-code-style --parallel 0 + python3 scripts/build.py --config ${{ matrix.config.type }} --skip-check-code-style --parallel 0 fi - name: Prepare artifacts run: | @@ -119,7 +119,7 @@ jobs: git diff --exit-code - name: Run build script run: | - python scripts\build.py --skip-check-code-style --skip-tests --config ${{ matrix.config.type }} --parallel 0 + python scripts\build.py --skip-check-code-style --config ${{ matrix.config.type }} --parallel 0 - name: Prepare artifacts run: | copy LICENSE.txt ${{ matrix.config.build_dir }}\windows\x64\output\bin\ @@ -171,7 +171,7 @@ jobs: run: .github/workflows/scripts/build-dependencies-macos.sh - name: Run build script run: | - python3 scripts/build.py --skip-check-code-style --skip-tests --config ${{ matrix.config.type }} --cmake-extra "CMAKE_PREFIX_PATH=$HOME/deps" --cmake-extra CMAKE_OSX_DEPLOYMENT_TARGET=11.0 --parallel 0 + python3 scripts/build.py --skip-check-code-style --config ${{ matrix.config.type }} --cmake-extra "CMAKE_PREFIX_PATH=$HOME/deps" --cmake-extra CMAKE_OSX_DEPLOYMENT_TARGET=11.0 --parallel 0 - name: Prepare artifacts run: | cp LICENSE.txt ${{ matrix.config.build_dir }}/darwin/universal/output/bin/ From e2e518d0b6fef2c04e8d7bef3a21de26c523177a Mon Sep 17 00:00:00 2001 From: Panagiotis Apostolou <104391532+panos-lunarg@users.noreply.github.com> Date: Wed, 6 Nov 2024 15:55:27 +0200 Subject: [PATCH 66/70] Fixes for newly added VulkanStateTracker::DestroyState (#1869) The DestroyState() overrides added by 1644 should also do what the default DestroyState is doing. 
This patch should address this omission --- framework/encode/vulkan_state_tracker.cpp | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/framework/encode/vulkan_state_tracker.cpp b/framework/encode/vulkan_state_tracker.cpp index 40f3eb4f16..4ddab90a08 100644 --- a/framework/encode/vulkan_state_tracker.cpp +++ b/framework/encode/vulkan_state_tracker.cpp @@ -1955,6 +1955,7 @@ void VulkanStateTracker::DestroyState(vulkan_wrappers::DeviceMemoryWrapper* wrap void gfxrecon::encode::VulkanStateTracker::DestroyState(vulkan_wrappers::BufferWrapper* wrapper) { GFXRECON_ASSERT(wrapper != nullptr); + wrapper->create_parameters = nullptr; if (wrapper != nullptr && wrapper->bind_device != nullptr) { @@ -2037,6 +2038,7 @@ void VulkanStateTracker::DestroyState(vulkan_wrappers::AccelerationStructureKHRW void VulkanStateTracker::DestroyState(vulkan_wrappers::AccelerationStructureNVWrapper* wrapper) { assert(wrapper != nullptr); + wrapper->create_parameters = nullptr; for (auto entry : wrapper->descriptor_sets_bound_to) { @@ -2046,6 +2048,9 @@ void VulkanStateTracker::DestroyState(vulkan_wrappers::AccelerationStructureNVWr void VulkanStateTracker::DestroyState(vulkan_wrappers::ImageWrapper* wrapper) { + assert(wrapper != nullptr); + wrapper->create_parameters = nullptr; + if (wrapper->bind_memory_id != format::kNullHandleId) { vulkan_wrappers::DeviceMemoryWrapper* mem_wrapper = @@ -2078,6 +2083,7 @@ void VulkanStateTracker::DestroyState(vulkan_wrappers::ImageWrapper* wrapper) void VulkanStateTracker::DestroyState(vulkan_wrappers::ImageViewWrapper* wrapper) { assert(wrapper != nullptr); + wrapper->create_parameters = nullptr; for (auto entry : wrapper->descriptor_sets_bound_to) { @@ -2093,6 +2099,7 @@ void VulkanStateTracker::DestroyState(vulkan_wrappers::ImageViewWrapper* wrapper void VulkanStateTracker::DestroyState(vulkan_wrappers::BufferViewWrapper* wrapper) { assert(wrapper != nullptr); + wrapper->create_parameters = nullptr; for (auto entry : wrapper->descriptor_sets_bound_to) { @@ -2108,6 +2115,7 @@ void VulkanStateTracker::DestroyState(vulkan_wrappers::BufferViewWrapper* wrappe void VulkanStateTracker::DestroyState(vulkan_wrappers::SamplerWrapper* wrapper) { assert(wrapper != nullptr); + wrapper->create_parameters = nullptr; for (auto entry : wrapper->descriptor_sets_bound_to) { @@ -2117,6 +2125,9 @@ void VulkanStateTracker::DestroyState(vulkan_wrappers::SamplerWrapper* wrapper) void VulkanStateTracker::DestroyState(vulkan_wrappers::DescriptorSetWrapper* wrapper) { + assert(wrapper != nullptr); + wrapper->create_parameters = nullptr; + for (auto& entry : wrapper->bindings) { vulkan_state_info::DescriptorInfo& binding = entry.second; From e35c9bc89b5df367f5cd9c00504602c6f9d9c84f Mon Sep 17 00:00:00 2001 From: Antonio Caggiano Date: Wed, 6 Nov 2024 13:54:03 -0700 Subject: [PATCH 67/70] tocpp: Cast values to platfrom full types (#1871) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fix invalid conversion errors by casting argument values to their target platform full types. 
This fixes compilation issues for code like this: ``` VkWaylandSurfaceCreateInfoKHR waylandSurfaceCreateInfoKHR_17 { VkStructureType(1000006000), NULL, VkWaylandSurfaceCreateFlagsKHR(0), 0, 0x2000000, }; ``` which compiled with g++ 14.2.1 20240912 yields this: ``` src/frame_0000_0000.cpp:132:25: error: invalid conversion from ‘int’ to ‘wl_surface*’ [-fpermissive] ``` --- .../generated_vulkan_cpp_structs.cpp | 46 +++++++++---------- .../vulkan_cpp_struct_generator.py | 15 +++++- 2 files changed, 37 insertions(+), 24 deletions(-) diff --git a/framework/generated/generated_vulkan_cpp_structs.cpp b/framework/generated/generated_vulkan_cpp_structs.cpp index c52c46332e..739ad793c6 100644 --- a/framework/generated/generated_vulkan_cpp_structs.cpp +++ b/framework/generated/generated_vulkan_cpp_structs.cpp @@ -8383,7 +8383,7 @@ std::string GenerateStruct_VkXlibSurfaceCreateInfoKHR(std::ostream &out, const V struct_body << "\t" << "VkStructureType(" << structInfo->sType << ")" << "," << std::endl; struct_body << "\t\t\t" << pnext_name << "," << std::endl; struct_body << "\t\t\t" << "VkXlibSurfaceCreateFlagsKHR(" << structInfo->flags << ")" << "," << std::endl; - struct_body << "\t\t\t" << structInfo->dpy << "," << std::endl; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->dpy << ")" << "," << std::endl; struct_body << "\t\t\t" << structInfo->window << ","; std::string variable_name = consumer.AddStruct(struct_body, "xlibSurfaceCreateInfoKHR"); out << "\t\t" << "VkXlibSurfaceCreateInfoKHR " << variable_name << " {" << std::endl; @@ -8400,7 +8400,7 @@ std::string GenerateStruct_VkXcbSurfaceCreateInfoKHR(std::ostream &out, const Vk struct_body << "\t" << "VkStructureType(" << structInfo->sType << ")" << "," << std::endl; struct_body << "\t\t\t" << pnext_name << "," << std::endl; struct_body << "\t\t\t" << "VkXcbSurfaceCreateFlagsKHR(" << structInfo->flags << ")" << "," << std::endl; - struct_body << "\t\t\t" << structInfo->connection << "," << std::endl; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->connection << ")" << "," << std::endl; struct_body << "\t\t\t" << structInfo->window << ","; std::string variable_name = consumer.AddStruct(struct_body, "xcbSurfaceCreateInfoKHR"); out << "\t\t" << "VkXcbSurfaceCreateInfoKHR " << variable_name << " {" << std::endl; @@ -8417,8 +8417,8 @@ std::string GenerateStruct_VkWaylandSurfaceCreateInfoKHR(std::ostream &out, cons struct_body << "\t" << "VkStructureType(" << structInfo->sType << ")" << "," << std::endl; struct_body << "\t\t\t" << pnext_name << "," << std::endl; struct_body << "\t\t\t" << "VkWaylandSurfaceCreateFlagsKHR(" << structInfo->flags << ")" << "," << std::endl; - struct_body << "\t\t\t" << structInfo->display << "," << std::endl; - struct_body << "\t\t\t" << structInfo->surface << ","; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->display << ")" << "," << std::endl; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->surface << ")" << ","; std::string variable_name = consumer.AddStruct(struct_body, "waylandSurfaceCreateInfoKHR"); out << "\t\t" << "VkWaylandSurfaceCreateInfoKHR " << variable_name << " {" << std::endl; out << "\t\t" << struct_body.str() << std::endl; @@ -8434,7 +8434,7 @@ std::string GenerateStruct_VkAndroidSurfaceCreateInfoKHR(std::ostream &out, cons struct_body << "\t" << "VkStructureType(" << structInfo->sType << ")" << "," << std::endl; struct_body << "\t\t\t" << pnext_name << "," << std::endl; struct_body << "\t\t\t" << "VkAndroidSurfaceCreateFlagsKHR(" << 
structInfo->flags << ")" << "," << std::endl; - struct_body << "\t\t\t" << structInfo->window << ","; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->window << ")" << ","; std::string variable_name = consumer.AddStruct(struct_body, "androidSurfaceCreateInfoKHR"); out << "\t\t" << "VkAndroidSurfaceCreateInfoKHR " << variable_name << " {" << std::endl; out << "\t\t" << struct_body.str() << std::endl; @@ -8450,8 +8450,8 @@ std::string GenerateStruct_VkWin32SurfaceCreateInfoKHR(std::ostream &out, const struct_body << "\t" << "VkStructureType(" << structInfo->sType << ")" << "," << std::endl; struct_body << "\t\t\t" << pnext_name << "," << std::endl; struct_body << "\t\t\t" << "VkWin32SurfaceCreateFlagsKHR(" << structInfo->flags << ")" << "," << std::endl; - struct_body << "\t\t\t" << structInfo->hinstance << "," << std::endl; - struct_body << "\t\t\t" << structInfo->hwnd << ","; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->hinstance << ")" << "," << std::endl; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->hwnd << ")" << ","; std::string variable_name = consumer.AddStruct(struct_body, "win32SurfaceCreateInfoKHR"); out << "\t\t" << "VkWin32SurfaceCreateInfoKHR " << variable_name << " {" << std::endl; out << "\t\t" << struct_body.str() << std::endl; @@ -9902,7 +9902,7 @@ std::string GenerateStruct_VkExportMemoryWin32HandleInfoKHR(std::ostream &out, c struct_body << "\t\t\t" << pnext_name << "," << std::endl; struct_body << "\t\t\t" << structInfo->pAttributes << "," << std::endl; struct_body << "\t\t\t" << structInfo->dwAccess << "," << std::endl; - struct_body << "\t\t\t" << structInfo->name << ","; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->name << ")" << ","; std::string variable_name = consumer.AddStruct(struct_body, "exportMemoryWin32HandleInfoKHR"); out << "\t\t" << "VkExportMemoryWin32HandleInfoKHR " << variable_name << " {" << std::endl; out << "\t\t" << struct_body.str() << std::endl; @@ -9917,8 +9917,8 @@ std::string GenerateStruct_VkImportMemoryWin32HandleInfoKHR(std::ostream &out, c struct_body << "\t" << "VkStructureType(" << structInfo->sType << ")" << "," << std::endl; struct_body << "\t\t\t" << pnext_name << "," << std::endl; struct_body << "\t\t\t" << "VkExternalMemoryHandleTypeFlagBits(" << structInfo->handleType << ")" << "," << std::endl; - struct_body << "\t\t\t" << structInfo->handle << "," << std::endl; - struct_body << "\t\t\t" << structInfo->name << ","; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->handle << ")" << "," << std::endl; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->name << ")" << ","; std::string variable_name = consumer.AddStruct(struct_body, "importMemoryWin32HandleInfoKHR"); out << "\t\t" << "VkImportMemoryWin32HandleInfoKHR " << variable_name << " {" << std::endl; out << "\t\t" << struct_body.str() << std::endl; @@ -10123,7 +10123,7 @@ std::string GenerateStruct_VkExportSemaphoreWin32HandleInfoKHR(std::ostream &out struct_body << "\t\t\t" << pnext_name << "," << std::endl; struct_body << "\t\t\t" << structInfo->pAttributes << "," << std::endl; struct_body << "\t\t\t" << structInfo->dwAccess << "," << std::endl; - struct_body << "\t\t\t" << structInfo->name << ","; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->name << ")" << ","; std::string variable_name = consumer.AddStruct(struct_body, "exportSemaphoreWin32HandleInfoKHR"); out << "\t\t" << "VkExportSemaphoreWin32HandleInfoKHR " << variable_name << " {" << std::endl; out << 
"\t\t" << struct_body.str() << std::endl; @@ -10140,8 +10140,8 @@ std::string GenerateStruct_VkImportSemaphoreWin32HandleInfoKHR(std::ostream &out struct_body << "\t\t\t" << consumer.GetHandle(metaInfo->semaphore) << "," << std::endl; struct_body << "\t\t\t" << "VkSemaphoreImportFlags(" << structInfo->flags << ")" << "," << std::endl; struct_body << "\t\t\t" << "VkExternalSemaphoreHandleTypeFlagBits(" << structInfo->handleType << ")" << "," << std::endl; - struct_body << "\t\t\t" << structInfo->handle << "," << std::endl; - struct_body << "\t\t\t" << structInfo->name << ","; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->handle << ")" << "," << std::endl; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->name << ")" << ","; std::string variable_name = consumer.AddStruct(struct_body, "importSemaphoreWin32HandleInfoKHR"); out << "\t\t" << "VkImportSemaphoreWin32HandleInfoKHR " << variable_name << " {" << std::endl; out << "\t\t" << struct_body.str() << std::endl; @@ -10312,7 +10312,7 @@ std::string GenerateStruct_VkExportFenceWin32HandleInfoKHR(std::ostream &out, co struct_body << "\t\t\t" << pnext_name << "," << std::endl; struct_body << "\t\t\t" << structInfo->pAttributes << "," << std::endl; struct_body << "\t\t\t" << structInfo->dwAccess << "," << std::endl; - struct_body << "\t\t\t" << structInfo->name << ","; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->name << ")" << ","; std::string variable_name = consumer.AddStruct(struct_body, "exportFenceWin32HandleInfoKHR"); out << "\t\t" << "VkExportFenceWin32HandleInfoKHR " << variable_name << " {" << std::endl; out << "\t\t" << struct_body.str() << std::endl; @@ -10344,8 +10344,8 @@ std::string GenerateStruct_VkImportFenceWin32HandleInfoKHR(std::ostream &out, co struct_body << "\t\t\t" << consumer.GetHandle(metaInfo->fence) << "," << std::endl; struct_body << "\t\t\t" << "VkFenceImportFlags(" << structInfo->flags << ")" << "," << std::endl; struct_body << "\t\t\t" << "VkExternalFenceHandleTypeFlagBits(" << structInfo->handleType << ")" << "," << std::endl; - struct_body << "\t\t\t" << structInfo->handle << "," << std::endl; - struct_body << "\t\t\t" << structInfo->name << ","; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->handle << ")" << "," << std::endl; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->name << ")" << ","; std::string variable_name = consumer.AddStruct(struct_body, "importFenceWin32HandleInfoKHR"); out << "\t\t" << "VkImportFenceWin32HandleInfoKHR " << variable_name << " {" << std::endl; out << "\t\t" << struct_body.str() << std::endl; @@ -13187,7 +13187,7 @@ std::string GenerateStruct_VkImportMemoryWin32HandleInfoNV(std::ostream &out, co struct_body << "\t" << "VkStructureType(" << structInfo->sType << ")" << "," << std::endl; struct_body << "\t\t\t" << pnext_name << "," << std::endl; struct_body << "\t\t\t" << "VkExternalMemoryHandleTypeFlagsNV(" << structInfo->handleType << ")" << "," << std::endl; - struct_body << "\t\t\t" << structInfo->handle << ","; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->handle << ")" << ","; std::string variable_name = consumer.AddStruct(struct_body, "importMemoryWin32HandleInfoNV"); out << "\t\t" << "VkImportMemoryWin32HandleInfoNV " << variable_name << " {" << std::endl; out << "\t\t" << struct_body.str() << std::endl; @@ -15623,7 +15623,7 @@ std::string GenerateStruct_VkMetalSurfaceCreateInfoEXT(std::ostream &out, const struct_body << "\t" << "VkStructureType(" << structInfo->sType << ")" << 
"," << std::endl; struct_body << "\t\t\t" << pnext_name << "," << std::endl; struct_body << "\t\t\t" << "VkMetalSurfaceCreateFlagsEXT(" << structInfo->flags << ")" << "," << std::endl; - struct_body << "\t\t\t" << structInfo->pLayer << ","; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->pLayer << ")" << ","; std::string variable_name = consumer.AddStruct(struct_body, "metalSurfaceCreateInfoEXT"); out << "\t\t" << "VkMetalSurfaceCreateInfoEXT " << variable_name << " {" << std::endl; out << "\t\t" << struct_body.str() << std::endl; @@ -16084,7 +16084,7 @@ std::string GenerateStruct_VkSurfaceFullScreenExclusiveWin32InfoEXT(std::ostream std::string pnext_name = GenerateExtension(out, structInfo->pNext, metaInfo->pNext, consumer); struct_body << "\t" << "VkStructureType(" << structInfo->sType << ")" << "," << std::endl; struct_body << "\t\t\t" << pnext_name << "," << std::endl; - struct_body << "\t\t\t" << structInfo->hmonitor << ","; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->hmonitor << ")" << ","; std::string variable_name = consumer.AddStruct(struct_body, "surfaceFullScreenExclusiveWin32InfoEXT"); out << "\t\t" << "VkSurfaceFullScreenExclusiveWin32InfoEXT " << variable_name << " {" << std::endl; out << "\t\t" << struct_body.str() << std::endl; @@ -17857,8 +17857,8 @@ std::string GenerateStruct_VkDirectFBSurfaceCreateInfoEXT(std::ostream &out, con struct_body << "\t" << "VkStructureType(" << structInfo->sType << ")" << "," << std::endl; struct_body << "\t\t\t" << pnext_name << "," << std::endl; struct_body << "\t\t\t" << "VkDirectFBSurfaceCreateFlagsEXT(" << structInfo->flags << ")" << "," << std::endl; - struct_body << "\t\t\t" << structInfo->dfb << "," << std::endl; - struct_body << "\t\t\t" << structInfo->surface << ","; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->dfb << ")" << "," << std::endl; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->surface << ")" << ","; std::string variable_name = consumer.AddStruct(struct_body, "directFBSurfaceCreateInfoEXT"); out << "\t\t" << "VkDirectFBSurfaceCreateInfoEXT " << variable_name << " {" << std::endl; out << "\t\t" << struct_body.str() << std::endl; @@ -18329,8 +18329,8 @@ std::string GenerateStruct_VkScreenSurfaceCreateInfoQNX(std::ostream &out, const struct_body << "\t" << "VkStructureType(" << structInfo->sType << ")" << "," << std::endl; struct_body << "\t\t\t" << pnext_name << "," << std::endl; struct_body << "\t\t\t" << "VkScreenSurfaceCreateFlagsQNX(" << structInfo->flags << ")" << "," << std::endl; - struct_body << "\t\t\t" << structInfo->context << "," << std::endl; - struct_body << "\t\t\t" << structInfo->window << ","; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->context << ")" << "," << std::endl; + struct_body << "\t\t\t" << "reinterpret_cast(" << structInfo->window << ")" << ","; std::string variable_name = consumer.AddStruct(struct_body, "screenSurfaceCreateInfoQNX"); out << "\t\t" << "VkScreenSurfaceCreateInfoQNX " << variable_name << " {" << std::endl; out << "\t\t" << struct_body.str() << std::endl; diff --git a/framework/generated/vulkan_generators/vulkan_cpp_struct_generator.py b/framework/generated/vulkan_generators/vulkan_cpp_struct_generator.py index c4590b4ae0..31dceef13f 100644 --- a/framework/generated/vulkan_generators/vulkan_cpp_struct_generator.py +++ b/framework/generated/vulkan_generators/vulkan_cpp_struct_generator.py @@ -498,7 +498,17 @@ def handleInputArray(self, struct_prefix, arg, num_lengths, lengths, indent, hea 
local_body.append(self.generateTodoFor(arg.name + '(input pointer)', indent)) return local_header, local_body - + + def handlePointer(self, struct_prefix, arg, num_lengths, lengths, indent, header, body, isFirstArg, isLastArg): + local_header = [] + local_body = [] + local_header.extend(header) + local_body.extend(body) + arg_name = struct_prefix + arg.name + struct_arg = f'"reinterpret_cast<{arg.platform_full_type}>(" << {arg_name} << ")"' + local_body.append(makeOutStructSet(struct_arg, locals(), isFirstArg, isLastArg, indent)) + + return local_header, local_body def handleOutputParam(self, struct_prefix, arg, num_lengths, lengths, indent, header, body, isFirstArg, isLastArg): local_header = [] @@ -836,6 +846,9 @@ def makeStructDeclBody(self, structName): elif self.is_input_pointer(arg) and arg.is_array: header, body = self.handleInputArray(struct_prefix, arg, num_lengths, lengths, indent, header, body, isFirstArg, isLastArg) + elif arg.platform_full_type is not None and arg.pointer_count > 0: + header, body = self.handlePointer(struct_prefix, arg, num_lengths, lengths, indent, header, body, isFirstArg, isLastArg) + else: struct_arg = arg_name if 'int8_t' in arg.base_type: From ed14f15539c11509d8ee3e1b32d71757573716d2 Mon Sep 17 00:00:00 2001 From: Locke Lin <47329816+locke-lunarg@users.noreply.github.com> Date: Wed, 6 Nov 2024 17:49:05 -0700 Subject: [PATCH 68/70] Release ID3D12GraphicsCommandList4 (#1866) Use ID3D12GraphicsCommandList4ComPtr to release automatically. --- framework/decode/dx12_dump_resources.cpp | 8 ++-- .../decode/dx12_replay_consumer_base.cpp | 6 +-- .../dx12_replay_consumer_body_generator.py | 2 +- .../generated_dx12_replay_consumer.cpp | 40 +++++++++---------- 4 files changed, 28 insertions(+), 28 deletions(-) diff --git a/framework/decode/dx12_dump_resources.cpp b/framework/decode/dx12_dump_resources.cpp index 0f1f226434..0aec96c308 100644 --- a/framework/decode/dx12_dump_resources.cpp +++ b/framework/decode/dx12_dump_resources.cpp @@ -455,7 +455,7 @@ void Dx12DumpResources::BeginRenderPass( } // before - ID3D12GraphicsCommandList4* command_list4_before; + graphics::dx12::ID3D12GraphicsCommandList4ComPtr command_list4_before; dump_command_sets[graphics::dx12::kBeforeDrawCallArrayIndex].list->QueryInterface( IID_PPV_ARGS(&command_list4_before)); @@ -479,7 +479,7 @@ void Dx12DumpResources::BeginRenderPass( command_list4_before->BeginRenderPass(NumRenderTargets, before_rt_descs.data(), p_before_ds_desc, Flags); // draw call - ID3D12GraphicsCommandList4* command_list4_draw_call; + graphics::dx12::ID3D12GraphicsCommandList4ComPtr command_list4_draw_call; dump_command_sets[graphics::dx12::kDrawCallArrayIndex].list->QueryInterface( IID_PPV_ARGS(&command_list4_draw_call)); @@ -507,7 +507,7 @@ void Dx12DumpResources::BeginRenderPass( NumRenderTargets, draw_call_rt_descs.data(), p_draw_call_ds_desc, Flags); // after - ID3D12GraphicsCommandList4* command_list4_after; + graphics::dx12::ID3D12GraphicsCommandList4ComPtr command_list4_after; dump_command_sets[graphics::dx12::kAfterDrawCallArrayIndex].list->QueryInterface( IID_PPV_ARGS(&command_list4_after)); @@ -532,7 +532,7 @@ void Dx12DumpResources::BeginRenderPass( } else if (dump_command_sets.size() == 1) { - ID3D12GraphicsCommandList4* command_list4; + graphics::dx12::ID3D12GraphicsCommandList4ComPtr command_list4; dump_command_sets[0].list->QueryInterface(IID_PPV_ARGS(&command_list4)); command_list4->BeginRenderPass( NumRenderTargets, pRenderTargets->GetPointer(), pDepthStencil->GetPointer(), Flags); diff --git 
a/framework/decode/dx12_replay_consumer_base.cpp b/framework/decode/dx12_replay_consumer_base.cpp index 025bfbbd49..bb9f38d2a6 100644 --- a/framework/decode/dx12_replay_consumer_base.cpp +++ b/framework/decode/dx12_replay_consumer_base.cpp @@ -3925,7 +3925,7 @@ void Dx12ReplayConsumerBase::OverrideBuildRaytracingAccelerationStructure( format::ApiCall_ID3D12GraphicsCommandList4_BuildRaytracingAccelerationStructure); for (auto& command_set : dump_command_sets) { - ID3D12GraphicsCommandList4* command_list4; + graphics::dx12::ID3D12GraphicsCommandList4ComPtr command_list4; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list4)); command_list4->BuildRaytracingAccelerationStructure( desc->GetPointer(), num_post_build_info_descs, post_build_info_descs->GetPointer()); @@ -4108,7 +4108,7 @@ void Dx12ReplayConsumerBase::OverrideDispatchRays(DxObjectInfo* command_list4_ob command_list4_object_info, GetCurrentBlockIndex(), format::ApiCall_ID3D12GraphicsCommandList4_DispatchRays); for (auto& command_set : dump_command_sets) { - ID3D12GraphicsCommandList4* command_list4; + graphics::dx12::ID3D12GraphicsCommandList4ComPtr command_list4; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list4)); command_list4->DispatchRays(desc_decoder->GetPointer()); } @@ -4141,7 +4141,7 @@ void Dx12ReplayConsumerBase::OverrideSetPipelineState1(DxObjectInfo* command_lis format::ApiCall_ID3D12GraphicsCommandList4_SetPipelineState1); for (auto& command_set : dump_command_sets) { - ID3D12GraphicsCommandList4* command_list4; + graphics::dx12::ID3D12GraphicsCommandList4ComPtr command_list4; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list4)); command_list4->SetPipelineState1(state_object); } diff --git a/framework/generated/dx12_generators/dx12_replay_consumer_body_generator.py b/framework/generated/dx12_generators/dx12_replay_consumer_body_generator.py index ce13807bbc..e7f83a0bb4 100644 --- a/framework/generated/dx12_generators/dx12_replay_consumer_body_generator.py +++ b/framework/generated/dx12_generators/dx12_replay_consumer_body_generator.py @@ -556,7 +556,7 @@ def make_consumer_func_body(self, return_type, name, values): ) if class_name != 'ID3D12GraphicsCommandList': code += ( - " {0}* command_list{1};\n" + " graphics::dx12::{0}ComPtr command_list{1};\n" " command_set.list->QueryInterface(IID_PPV_ARGS(&command_list{1}));\n".format(class_name, class_name[-1]) ) indent_length = len(code) diff --git a/framework/generated/generated_dx12_replay_consumer.cpp b/framework/generated/generated_dx12_replay_consumer.cpp index c4500a7671..c03f166523 100644 --- a/framework/generated/generated_dx12_replay_consumer.cpp +++ b/framework/generated/generated_dx12_replay_consumer.cpp @@ -7768,7 +7768,7 @@ void Dx12ReplayConsumer::Process_ID3D12GraphicsCommandList1_AtomicCopyBufferUINT auto dump_command_sets = dump_resources_->GetCommandListsForDumpResources(replay_object, call_info.index, format::ApiCall_ID3D12GraphicsCommandList1_AtomicCopyBufferUINT); for (auto& command_set : dump_command_sets) { - ID3D12GraphicsCommandList1* command_list1; + graphics::dx12::ID3D12GraphicsCommandList1ComPtr command_list1; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list1)); command_list1->AtomicCopyBufferUINT(in_pDstBuffer, DstOffset, @@ -7834,7 +7834,7 @@ void Dx12ReplayConsumer::Process_ID3D12GraphicsCommandList1_AtomicCopyBufferUINT auto dump_command_sets = dump_resources_->GetCommandListsForDumpResources(replay_object, call_info.index, format::ApiCall_ID3D12GraphicsCommandList1_AtomicCopyBufferUINT64); for (auto& 
command_set : dump_command_sets) { - ID3D12GraphicsCommandList1* command_list1; + graphics::dx12::ID3D12GraphicsCommandList1ComPtr command_list1; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list1)); command_list1->AtomicCopyBufferUINT64(in_pDstBuffer, DstOffset, @@ -7882,7 +7882,7 @@ void Dx12ReplayConsumer::Process_ID3D12GraphicsCommandList1_OMSetDepthBounds( auto dump_command_sets = dump_resources_->GetCommandListsForDumpResources(replay_object, call_info.index, format::ApiCall_ID3D12GraphicsCommandList1_OMSetDepthBounds); for (auto& command_set : dump_command_sets) { - ID3D12GraphicsCommandList1* command_list1; + graphics::dx12::ID3D12GraphicsCommandList1ComPtr command_list1; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list1)); command_list1->OMSetDepthBounds(Min, Max); @@ -7923,7 +7923,7 @@ void Dx12ReplayConsumer::Process_ID3D12GraphicsCommandList1_SetSamplePositions( auto dump_command_sets = dump_resources_->GetCommandListsForDumpResources(replay_object, call_info.index, format::ApiCall_ID3D12GraphicsCommandList1_SetSamplePositions); for (auto& command_set : dump_command_sets) { - ID3D12GraphicsCommandList1* command_list1; + graphics::dx12::ID3D12GraphicsCommandList1ComPtr command_list1; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list1)); command_list1->SetSamplePositions(NumSamplesPerPixel, NumPixels, @@ -7986,7 +7986,7 @@ void Dx12ReplayConsumer::Process_ID3D12GraphicsCommandList1_ResolveSubresourceRe auto dump_command_sets = dump_resources_->GetCommandListsForDumpResources(replay_object, call_info.index, format::ApiCall_ID3D12GraphicsCommandList1_ResolveSubresourceRegion); for (auto& command_set : dump_command_sets) { - ID3D12GraphicsCommandList1* command_list1; + graphics::dx12::ID3D12GraphicsCommandList1ComPtr command_list1; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list1)); command_list1->ResolveSubresourceRegion(in_pDstResource, DstSubresource, @@ -8035,7 +8035,7 @@ void Dx12ReplayConsumer::Process_ID3D12GraphicsCommandList1_SetViewInstanceMask( auto dump_command_sets = dump_resources_->GetCommandListsForDumpResources(replay_object, call_info.index, format::ApiCall_ID3D12GraphicsCommandList1_SetViewInstanceMask); for (auto& command_set : dump_command_sets) { - ID3D12GraphicsCommandList1* command_list1; + graphics::dx12::ID3D12GraphicsCommandList1ComPtr command_list1; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list1)); command_list1->SetViewInstanceMask(Mask); } @@ -8075,7 +8075,7 @@ void Dx12ReplayConsumer::Process_ID3D12GraphicsCommandList2_WriteBufferImmediate auto dump_command_sets = dump_resources_->GetCommandListsForDumpResources(replay_object, call_info.index, format::ApiCall_ID3D12GraphicsCommandList2_WriteBufferImmediate); for (auto& command_set : dump_command_sets) { - ID3D12GraphicsCommandList2* command_list2; + graphics::dx12::ID3D12GraphicsCommandList2ComPtr command_list2; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list2)); command_list2->WriteBufferImmediate(Count, pParams->GetPointer(), @@ -12086,7 +12086,7 @@ void Dx12ReplayConsumer::Process_ID3D12GraphicsCommandList3_SetProtectedResource auto dump_command_sets = dump_resources_->GetCommandListsForDumpResources(replay_object, call_info.index, format::ApiCall_ID3D12GraphicsCommandList3_SetProtectedResourceSession); for (auto& command_set : dump_command_sets) { - ID3D12GraphicsCommandList3* command_list3; + graphics::dx12::ID3D12GraphicsCommandList3ComPtr command_list3; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list3)); 
command_list3->SetProtectedResourceSession(in_pProtectedResourceSession); } @@ -12181,7 +12181,7 @@ void Dx12ReplayConsumer::Process_ID3D12GraphicsCommandList4_EndRenderPass( auto dump_command_sets = dump_resources_->GetCommandListsForDumpResources(replay_object, call_info.index, format::ApiCall_ID3D12GraphicsCommandList4_EndRenderPass); for (auto& command_set : dump_command_sets) { - ID3D12GraphicsCommandList4* command_list4; + graphics::dx12::ID3D12GraphicsCommandList4ComPtr command_list4; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list4)); command_list4->EndRenderPass(); } @@ -12220,7 +12220,7 @@ void Dx12ReplayConsumer::Process_ID3D12GraphicsCommandList4_InitializeMetaComman auto dump_command_sets = dump_resources_->GetCommandListsForDumpResources(replay_object, call_info.index, format::ApiCall_ID3D12GraphicsCommandList4_InitializeMetaCommand); for (auto& command_set : dump_command_sets) { - ID3D12GraphicsCommandList4* command_list4; + graphics::dx12::ID3D12GraphicsCommandList4ComPtr command_list4; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list4)); command_list4->InitializeMetaCommand(in_pMetaCommand, pInitializationParametersData->GetPointer(), @@ -12264,7 +12264,7 @@ void Dx12ReplayConsumer::Process_ID3D12GraphicsCommandList4_ExecuteMetaCommand( auto dump_command_sets = dump_resources_->GetCommandListsForDumpResources(replay_object, call_info.index, format::ApiCall_ID3D12GraphicsCommandList4_ExecuteMetaCommand); for (auto& command_set : dump_command_sets) { - ID3D12GraphicsCommandList4* command_list4; + graphics::dx12::ID3D12GraphicsCommandList4ComPtr command_list4; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list4)); command_list4->ExecuteMetaCommand(in_pMetaCommand, pExecutionParametersData->GetPointer(), @@ -12345,7 +12345,7 @@ void Dx12ReplayConsumer::Process_ID3D12GraphicsCommandList4_EmitRaytracingAccele auto dump_command_sets = dump_resources_->GetCommandListsForDumpResources(replay_object, call_info.index, format::ApiCall_ID3D12GraphicsCommandList4_EmitRaytracingAccelerationStructurePostbuildInfo); for (auto& command_set : dump_command_sets) { - ID3D12GraphicsCommandList4* command_list4; + graphics::dx12::ID3D12GraphicsCommandList4ComPtr command_list4; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list4)); command_list4->EmitRaytracingAccelerationStructurePostbuildInfo(pDesc->GetPointer(), NumSourceAccelerationStructures, @@ -12390,7 +12390,7 @@ void Dx12ReplayConsumer::Process_ID3D12GraphicsCommandList4_CopyRaytracingAccele auto dump_command_sets = dump_resources_->GetCommandListsForDumpResources(replay_object, call_info.index, format::ApiCall_ID3D12GraphicsCommandList4_CopyRaytracingAccelerationStructure); for (auto& command_set : dump_command_sets) { - ID3D12GraphicsCommandList4* command_list4; + graphics::dx12::ID3D12GraphicsCommandList4ComPtr command_list4; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list4)); command_list4->CopyRaytracingAccelerationStructure(DestAccelerationStructureData, SourceAccelerationStructureData, @@ -13535,7 +13535,7 @@ void Dx12ReplayConsumer::Process_ID3D12GraphicsCommandList5_RSSetShadingRate( auto dump_command_sets = dump_resources_->GetCommandListsForDumpResources(replay_object, call_info.index, format::ApiCall_ID3D12GraphicsCommandList5_RSSetShadingRate); for (auto& command_set : dump_command_sets) { - ID3D12GraphicsCommandList5* command_list5; + graphics::dx12::ID3D12GraphicsCommandList5ComPtr command_list5; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list5)); 
command_list5->RSSetShadingRate(baseShadingRate, combiners->GetPointer()); @@ -13571,7 +13571,7 @@ void Dx12ReplayConsumer::Process_ID3D12GraphicsCommandList5_RSSetShadingRateImag auto dump_command_sets = dump_resources_->GetCommandListsForDumpResources(replay_object, call_info.index, format::ApiCall_ID3D12GraphicsCommandList5_RSSetShadingRateImage); for (auto& command_set : dump_command_sets) { - ID3D12GraphicsCommandList5* command_list5; + graphics::dx12::ID3D12GraphicsCommandList5ComPtr command_list5; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list5)); command_list5->RSSetShadingRateImage(in_shadingRateImage); } @@ -13610,7 +13610,7 @@ void Dx12ReplayConsumer::Process_ID3D12GraphicsCommandList6_DispatchMesh( auto dump_command_sets = dump_resources_->GetCommandListsForDumpResources(replay_object, call_info.index, format::ApiCall_ID3D12GraphicsCommandList6_DispatchMesh); for (auto& command_set : dump_command_sets) { - ID3D12GraphicsCommandList6* command_list6; + graphics::dx12::ID3D12GraphicsCommandList6ComPtr command_list6; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list6)); command_list6->DispatchMesh(ThreadGroupCountX, ThreadGroupCountY, @@ -13651,7 +13651,7 @@ void Dx12ReplayConsumer::Process_ID3D12GraphicsCommandList7_Barrier( auto dump_command_sets = dump_resources_->GetCommandListsForDumpResources(replay_object, call_info.index, format::ApiCall_ID3D12GraphicsCommandList7_Barrier); for (auto& command_set : dump_command_sets) { - ID3D12GraphicsCommandList7* command_list7; + graphics::dx12::ID3D12GraphicsCommandList7ComPtr command_list7; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list7)); command_list7->Barrier(NumBarrierGroups, pBarrierGroups->GetPointer()); @@ -13689,7 +13689,7 @@ void Dx12ReplayConsumer::Process_ID3D12GraphicsCommandList8_OMSetFrontAndBackSte auto dump_command_sets = dump_resources_->GetCommandListsForDumpResources(replay_object, call_info.index, format::ApiCall_ID3D12GraphicsCommandList8_OMSetFrontAndBackStencilRef); for (auto& command_set : dump_command_sets) { - ID3D12GraphicsCommandList8* command_list8; + graphics::dx12::ID3D12GraphicsCommandList8ComPtr command_list8; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list8)); command_list8->OMSetFrontAndBackStencilRef(FrontStencilRef, BackStencilRef); @@ -13730,7 +13730,7 @@ void Dx12ReplayConsumer::Process_ID3D12GraphicsCommandList9_RSSetDepthBias( auto dump_command_sets = dump_resources_->GetCommandListsForDumpResources(replay_object, call_info.index, format::ApiCall_ID3D12GraphicsCommandList9_RSSetDepthBias); for (auto& command_set : dump_command_sets) { - ID3D12GraphicsCommandList9* command_list9; + graphics::dx12::ID3D12GraphicsCommandList9ComPtr command_list9; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list9)); command_list9->RSSetDepthBias(DepthBias, DepthBiasClamp, @@ -13767,7 +13767,7 @@ void Dx12ReplayConsumer::Process_ID3D12GraphicsCommandList9_IASetIndexBufferStri auto dump_command_sets = dump_resources_->GetCommandListsForDumpResources(replay_object, call_info.index, format::ApiCall_ID3D12GraphicsCommandList9_IASetIndexBufferStripCutValue); for (auto& command_set : dump_command_sets) { - ID3D12GraphicsCommandList9* command_list9; + graphics::dx12::ID3D12GraphicsCommandList9ComPtr command_list9; command_set.list->QueryInterface(IID_PPV_ARGS(&command_list9)); command_list9->IASetIndexBufferStripCutValue(IBStripCutValue); } From 1f79155ca3da10a8d96b7f2db9c5919af19c66ac Mon Sep 17 00:00:00 2001 From: Antonio Caggiano Date: Wed, 6 Nov 2024 18:02:08 
-0700 Subject: [PATCH 69/70] tocpp: Support wayland (#1870) --- framework/decode/vulkan_cpp_consumer_base.cpp | 65 ++++ .../decode/vulkan_cpp_loader_generator.cpp | 9 +- .../decode/vulkan_cpp_template_strings.h | 281 ++++++++++++++++++ framework/decode/vulkan_cpp_utilities.h | 6 +- tools/tocpp/README.md | 23 +- 5 files changed, 367 insertions(+), 17 deletions(-) diff --git a/framework/decode/vulkan_cpp_consumer_base.cpp b/framework/decode/vulkan_cpp_consumer_base.cpp index d5df045a66..d2890ad846 100644 --- a/framework/decode/vulkan_cpp_consumer_base.cpp +++ b/framework/decode/vulkan_cpp_consumer_base.cpp @@ -94,6 +94,12 @@ void VulkanCppConsumerBase::WriteMainHeader() case GfxToCppPlatform::PLATFORM_XCB: fprintf(main_file_, "%s", sXcbOutputMainStart); break; + case GfxToCppPlatform::PLATFORM_WAYLAND: + fprintf(main_file_, "%s", sWaylandOutputMainStart); + break; + default: + GFXRECON_LOG_FATAL("Failed to write main header: Invalid platform (%d)", platform_); + break; } } @@ -110,6 +116,12 @@ void VulkanCppConsumerBase::WriteMainFooter() case GfxToCppPlatform::PLATFORM_XCB: fprintf(main_file_, "%s", sXcbOutputMainEnd); break; + case GfxToCppPlatform::PLATFORM_WAYLAND: + fprintf(main_file_, "%s", sWaylandOutputMainEnd); + break; + default: + GFXRECON_LOG_FATAL("Failed to write main footer: Invalid platform (%d)", platform_); + break; } } @@ -146,6 +158,17 @@ bool VulkanCppConsumerBase::WriteGlobalHeaderFile() sXcbOutputHeader, sCommonOutputHeaderFunctions); break; + case GfxToCppPlatform::PLATFORM_WAYLAND: + fprintf(header_file, + "%s%s%s%s", + sWaylandOutputHeadersPlatform, + sCommonHeaderOutputHeaders, + sWaylandOutputHeader, + sCommonOutputHeaderFunctions); + break; + default: + GFXRECON_LOG_FATAL("Failed to write global header file: Invalid platform (%d)", platform_); + break; } PrintToFile(header_file, "extern %s;\n", GfxToCppVariable::GenerateStringVec(variable_data_)); @@ -188,6 +211,12 @@ void VulkanCppConsumerBase::PrintOutCMakeFile() case GfxToCppPlatform::PLATFORM_XCB: fprintf(cmake_file, "%s", sXcbCMakeFile); break; + case GfxToCppPlatform::PLATFORM_WAYLAND: + fprintf(cmake_file, "%s", sWaylandCMakeFile); + break; + default: + GFXRECON_LOG_FATAL("Failed to print out CMake file: Unknown platform (%d)", platform_); + break; } util::platform::FileClose(cmake_file); } @@ -273,6 +302,22 @@ void VulkanCppConsumerBase::PrintOutGlobalVar() delete[] formatted_output_override_method; break; } + case GfxToCppPlatform::PLATFORM_WAYLAND: + { + int size = snprintf(NULL, 0, sWaylandOutputOverrideMethod, window_width_, window_height_); + char* formatted_output_override_method = new char[size + 2]; + snprintf(formatted_output_override_method, + size + 2, + sWaylandOutputOverrideMethod, + window_width_, + window_height_); + fputs(formatted_output_override_method, global_file); + delete[] formatted_output_override_method; + break; + } + default: + GFXRECON_LOG_FATAL("Failed to print out global var: Invalid platform (%d)", platform_); + break; } PrintToFile(global_file, "%s;\n", GfxToCppVariable::GenerateStringVec(variable_data_)); @@ -1703,6 +1748,26 @@ void VulkanCppConsumerBase::GenerateSurfaceCreation(GfxToCppPlatform plat surface_create_func_call = "vkCreateXcbSurfaceKHR"; break; } + case GfxToCppPlatform::PLATFORM_WAYLAND: + { + VkWaylandSurfaceCreateInfoKHR wayland_struct_info = {}; + Decoded_VkWaylandSurfaceCreateInfoKHR decoded_wayland_info = {}; + + if (platform_ == platform) + { + wayland_struct_info = + *reinterpret_cast*>(pSurfaceCreateInfo) + ->GetPointer(); + } + 
wayland_struct_info.sType = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR; + create_info_struct_var_name = GenerateStruct_VkWaylandSurfaceCreateInfoKHR( + stream_create_info, &wayland_struct_info, &decoded_wayland_info, *this); + surface_create_func_call = "vkCreateWaylandSurfaceKHR"; + break; + } + default: + GFXRECON_LOG_FATAL("Failed to generate surface creation: Invalid platform (%d)", platform_); + break; } fprintf(file, "\n%s", stream_create_info.str().c_str()); AddKnownVariables("VkSurfaceKHR", surface_var_name, pSurface); diff --git a/framework/decode/vulkan_cpp_loader_generator.cpp b/framework/decode/vulkan_cpp_loader_generator.cpp index a6c2454a0b..8168332e23 100644 --- a/framework/decode/vulkan_cpp_loader_generator.cpp +++ b/framework/decode/vulkan_cpp_loader_generator.cpp @@ -65,18 +65,21 @@ void VulkanCppLoaderGenerator::WriteOutLoaderGenerator(const std::string& outDir case GfxToCppPlatform::PLATFORM_XCB: fprintf(pfn_src_file, "#define VK_USE_PLATFORM_XCB_KHR\n"); break; + case GfxToCppPlatform::PLATFORM_WAYLAND: + fprintf(pfn_src_file, "#define VK_USE_PLATFORM_WAYLAND_KHR\n"); + break; #if 0 // TODO: implement these platforms case GfxToCppPlatform::PLATFORM_MACOS: fprintf(pfn_src_file, "#define VK_USE_PLATFORM_METAL_EXT\n"); break; - case GfxToCppPlatform::PLATFORM_WAYLAND: - fprintf(pfn_src_file, "#define VK_USE_PLATFORM_WAYLAND_KHR\n"); - break; case GfxToCppPlatform::PLATFORM_XLIB: fprintf(pfn_src_file, "#define VK_USE_PLATFORM_XLIB_KHR\n"); break; #endif + default: + GFXRECON_LOG_FATAL("Failed to write out loader generator: Invalid platform (%d)", platform); + break; } fprintf(pfn_src_file, "#include \"loader.h\"\n\n"); diff --git a/framework/decode/vulkan_cpp_template_strings.h b/framework/decode/vulkan_cpp_template_strings.h index d05e9f698a..9d0521678d 100644 --- a/framework/decode/vulkan_cpp_template_strings.h +++ b/framework/decode/vulkan_cpp_template_strings.h @@ -324,6 +324,287 @@ target_link_libraries(vulkan_app vulkan xcb) )"; // End of Xcb template strings +// Beginning of Wayland template strings +static const char* sWaylandOutputMainStart = R"( +#include "global_var.h" + +int main() { +)"; + +static const char* sWaylandOutputMainEnd = R"( + return 0; +} +)"; + +static const char* sWaylandOutputHeadersPlatform = R"( +// This file is a generated source, follow the instructions under tools/tocpp/README.md to build. 
+#define VK_USE_PLATFORM_WAYLAND_KHR +)"; + +static const char* sWaylandOutputHeader = R"( +#include +#include + +#define VK_CALL_CHECK(VK_CALL, VK_RESULT) \ + LogVkError(#VK_CALL, (VK_CALL), __FILE__, __LINE__, VK_RESULT) + +struct WaylandApp { + WaylandApp(uint32_t w, uint32_t h) { + width = w; + height = h; + } + + ~WaylandApp(); + + uint32_t width { 320 }; + uint32_t height { 240 }; + + struct { + wl_display *display { nullptr }; + wl_compositor *compositor { nullptr }; + wl_surface *surface { nullptr }; + int shm_fd {-1}; + wl_shm *shm { nullptr }; + wl_shm_pool *shm_pool { nullptr }; + wl_buffer *buffer { nullptr }; + } wl; + + struct { + xdg_wm_base *wm_base { nullptr }; + xdg_surface *surface { nullptr }; + xdg_toplevel *toplevel { nullptr }; + } xdg; +}; + +extern WaylandApp appdata; + +extern void OverrideVkWaylandSurfaceCreateInfoKHR(VkWaylandSurfaceCreateInfoKHR* createInfo, + struct WaylandApp& appdata); +extern void UpdateWindowSize(uint32_t width, + uint32_t height, + uint32_t pre_transform, + struct WaylandApp& appdata); +extern void LogVkError(const char* function, + VkResult returnValue, + const char* file, + int line, + VkResult capturedReturnValue); +extern size_t LoadBinaryData(const char* filename, + size_t file_offset, + void* buffer, + size_t offset, + size_t data_size, + struct WaylandApp& appdata); +)"; + +static const char* sWaylandOutputOverrideMethod = R"( +#include +#include +#include +#include +#include + +WaylandApp::~WaylandApp() +{ + wl_buffer_destroy(appdata.wl.buffer); + wl_shm_pool_destroy(appdata.wl.shm_pool); + wl_shm_destroy(appdata.wl.shm); + close(appdata.wl.shm_fd); + xdg_wm_base_destroy(appdata.xdg.wm_base); + xdg_toplevel_destroy(appdata.xdg.toplevel); + xdg_surface_destroy(appdata.xdg.surface); + wl_surface_destroy(appdata.wl.surface); + wl_compositor_destroy(appdata.wl.compositor); + wl_display_disconnect(appdata.wl.display); +} + +static void Randname(char *buf) +{ + struct timespec ts; + clock_gettime(CLOCK_REALTIME, &ts); + long r = ts.tv_nsec; + for (int i = 0; i < 6; ++i) { + buf[i] = 'A'+(r&15)+(r&16)*2; + r >>= 5; + } +} + +static int CreateShmFile(void) +{ + int retries = 100; + do { + char name[] = "/wl_shm-XXXXXX"; + Randname(name + sizeof(name) - 7); + --retries; + int fd = shm_open(name, O_RDWR | O_CREAT | O_EXCL, 0600); + if (fd >= 0) { + shm_unlink(name); + return fd; + } + } while (retries > 0 && errno == EEXIST); + return -1; +} + +int AllocateShmFile(size_t size) +{ + int fd = CreateShmFile(); + if (fd < 0) + return -1; + int ret; + do { + ret = ftruncate(fd, size); + } while (ret < 0 && errno == EINTR); + if (ret < 0) { + close(fd); + return -1; + } + return fd; +} + +static void +RegistryHandleGlobal(void *data, wl_registry *registry, uint32_t name, const char* interface, uint32_t version) +{ + WaylandApp *app = (WaylandApp *)data; + if (strcmp(interface, wl_compositor_interface.name) == 0) + { + app->wl.compositor = (wl_compositor *)wl_registry_bind(registry, name, &wl_compositor_interface, 4); + } + if (strcmp(interface, wl_shm_interface.name) == 0) + { + app->wl.shm = (wl_shm *)wl_registry_bind(registry, name, &wl_shm_interface, 1); + } + if (strcmp(interface, xdg_wm_base_interface.name) == 0) + { + app->xdg.wm_base = (xdg_wm_base *)wl_registry_bind(registry, name, &xdg_wm_base_interface, 1); + } +} + +static void RegistryHandleGlobalRemove(void *data, wl_registry *registry, uint32_t name) +{ +} + +static const wl_registry_listener registry_listener = { + .global = RegistryHandleGlobal, + .global_remove = 
RegistryHandleGlobalRemove, +}; + +static void XdgSurfaceConfigure(void *data, xdg_surface *xdg_surface, uint32_t serial) {} + +static const xdg_surface_listener surface_listener = { + .configure = XdgSurfaceConfigure, +}; + +void OverrideVkWaylandSurfaceCreateInfoKHR(VkWaylandSurfaceCreateInfoKHR* createInfo, WaylandApp& appdata) +{ + if (appdata.wl.display == nullptr) { + // Open the connection to the wayland server + appdata.wl.display = wl_display_connect(nullptr); + + if (appdata.wl.display == nullptr) + { + printf("Cannot open display\n"); + exit(1); + } + + wl_registry* registry = wl_display_get_registry(appdata.wl.display); + wl_registry_add_listener(registry, ®istry_listener, &appdata); + wl_display_roundtrip(appdata.wl.display); + wl_registry_destroy(registry); + + appdata.wl.surface = wl_compositor_create_surface(appdata.wl.compositor); + + appdata.xdg.surface = xdg_wm_base_get_xdg_surface(appdata.xdg.wm_base, appdata.wl.surface); + xdg_surface_add_listener(appdata.xdg.surface, &surface_listener, &appdata); + appdata.xdg.toplevel = xdg_surface_get_toplevel(appdata.xdg.surface); + xdg_toplevel_set_title(appdata.xdg.toplevel, "vulkan-app"); + + const int width = appdata.width; + const int height = appdata.height; + const int stride = width * 4; + const int shm_pool_size = height * stride * 2; + + appdata.wl.shm_fd = AllocateShmFile(shm_pool_size); + + appdata.wl.shm_pool = wl_shm_create_pool(appdata.wl.shm, appdata.wl.shm_fd, shm_pool_size); + + int index = 0; + int offset = height * stride * index; + appdata.wl.buffer = wl_shm_pool_create_buffer( + appdata.wl.shm_pool, offset, width, height, stride, WL_SHM_FORMAT_XRGB8888); + + wl_surface_attach(appdata.wl.surface, appdata.wl.buffer, 0, 0); + wl_surface_damage(appdata.wl.surface, 0, 0, UINT32_MAX, UINT32_MAX); + wl_surface_commit(appdata.wl.surface); + } + + createInfo->sType = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR; + createInfo->display = appdata.wl.display; + createInfo->surface = appdata.wl.surface; +} + +void UpdateWindowSize(uint32_t width, uint32_t height, uint32_t pretransform, WaylandApp& appdata) +{ + appdata.width = width; + appdata.height = height; +} + +size_t LoadBinaryData(const char* filename, + size_t file_offset, + void* buffer, + size_t offset, + size_t data_size, + WaylandApp& appdata) +{ + (void)appdata; // Unused + + FILE* fp = fopen(filename, "rb"); + if (fp == nullptr) + { + throw std::runtime_error("Error while opening file: " + std::string(filename)); + } + + fseek(fp, file_offset, SEEK_SET); + size_t read_size = fread((uint8_t *)buffer + offset, sizeof(uint8_t), data_size, fp); + if (read_size != data_size) + { + fclose(fp); + throw std::runtime_error("Error while reading file: " + std::string(filename)); + } + + fclose(fp); + return read_size; +} + +WaylandApp appdata(%d, %d); +)"; + +static const char* sWaylandCMakeFile = R"( +cmake_minimum_required(VERSION 3.7) +project(vulkan_app) +set (CMAKE_CXX_STANDARD 11) + +find_program(WAYLAND_SCANNER NAMES wayland-scanner REQUIRED) +set(XDG_SHELL_PROTOCOL_C ${CMAKE_CURRENT_BINARY_DIR}/xdg-shell-protocol.c) +set(XDG_SHELL_CLIENT_PROTOCOL_H ${CMAKE_CURRENT_BINARY_DIR}/xdg-shell-client-protocol.h) +set(XDG_SHELL_XML /usr/share/wayland-protocols/stable/xdg-shell/xdg-shell.xml) +add_custom_command( + OUTPUT ${XDG_SHELL_CLIENT_PROTOCOL_H} ${XDG_SHELL_PROTOCOL_C} + COMMAND ${WAYLAND_SCANNER} client-header ${XDG_SHELL_XML} ${XDG_SHELL_CLIENT_PROTOCOL_H} + COMMAND ${WAYLAND_SCANNER} private-code ${XDG_SHELL_XML} ${XDG_SHELL_PROTOCOL_C} + DEPENDS 
${XDG_SHELL_XML} +) + +include_directories(${PROJECT_SOURCE_DIR}/src/ ${CMAKE_CURRENT_BINARY_DIR}) +file(GLOB SRC_FILES ${PROJECT_SOURCE_DIR}/src/*.cpp) +file(GLOB MAIN_FILE ${PROJECT_SOURCE_DIR}/*.cpp) +add_executable(vulkan_app ${SRC_FILES} ${MAIN_FILE} ${XDG_SHELL_PROTOCOL_C}) +find_package(Vulkan REQUIRED) +find_package(PkgConfig) +pkg_check_modules(WAYLAND_CLIENT REQUIRED wayland-client) +target_link_libraries(vulkan_app Vulkan::Vulkan ${WAYLAND_CLIENT_LIBRARIES}) +)"; +// End of Wayland template strings + // Beginning of Android template strings static const char* sAndroidOutputHeadersPlatform = R"( // This file is a generated source, follow the instructions under tools/tocpp/README.md to build. diff --git a/framework/decode/vulkan_cpp_utilities.h b/framework/decode/vulkan_cpp_utilities.h index a0686128c8..2a60e7b7f1 100644 --- a/framework/decode/vulkan_cpp_utilities.h +++ b/framework/decode/vulkan_cpp_utilities.h @@ -35,7 +35,7 @@ enum class GfxToCppPlatform { PLATFORM_ANDROID, // PLATFORM_MACOS, - // PLATFORM_WAYLAND, + PLATFORM_WAYLAND, PLATFORM_WIN32, PLATFORM_XCB, // PLATFORM_XLIB, @@ -50,7 +50,7 @@ struct PlatformTargetInfo const std::map kTargetPlatforms = { { GfxToCppPlatform::PLATFORM_ANDROID, { "android", VK_KHR_ANDROID_SURFACE_EXTENSION_NAME } }, //{ GfxToCppPlatform::PLATFORM_MACOS, {"macos", VK_EXT_METAL_SURFACE_EXTENSION_NAME } }, - //{ GfxToCppPlatform::PLATFORM_WAYLAND, {"wayland", VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME } }, + { GfxToCppPlatform::PLATFORM_WAYLAND, { "wayland", VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME } }, { GfxToCppPlatform::PLATFORM_WIN32, { "win32", VK_KHR_WIN32_SURFACE_EXTENSION_NAME } }, { GfxToCppPlatform::PLATFORM_XCB, { "xcb", VK_KHR_XCB_SURFACE_EXTENSION_NAME } }, //{ GfxToCppPlatform::PLATFORM_XLIB, {"xlib", VK_KHR_XLIB_SURFACE_EXTENSION_NAME } }, @@ -59,7 +59,7 @@ const std::map kTargetPl const std::map kTargetPlatformByName = { { "android", GfxToCppPlatform::PLATFORM_ANDROID }, //{ "macos", GfxToCppPlatform::PLATFORM_MACOS}, - //{ "wayland", GfxToCppPlatform::PLATFORM_WAYLAND}, + { "wayland", GfxToCppPlatform::PLATFORM_WAYLAND }, { "win32", GfxToCppPlatform::PLATFORM_WIN32 }, { "xcb", GfxToCppPlatform::PLATFORM_XCB }, //{ "xlib", GfxToCppPlatform::PLATFORM_XLIB}, diff --git a/tools/tocpp/README.md b/tools/tocpp/README.md index 607fd701c5..01654caa94 100644 --- a/tools/tocpp/README.md +++ b/tools/tocpp/README.md @@ -19,7 +19,7 @@ These limitations are discussed below. Because the ToCpp tool is still not in a complete state, it has the following known issues which will be worked on over time: -* The generated cpp code only executes on either Android or Linux (XCB) +* The generated cpp code only executes on either Android or Linux (XCB or Wayland) * The generated cpp code does not support multi-window capture on Android * It does not currently support Ray Tracing contents in a capture * The generated cpp code expects a system exactly like the capture system: @@ -62,7 +62,7 @@ gfxrecon-tocpp | -h
--help | Optional | Print Usage information and exit. |
| -o --output | Required | Directory path where the output will be generated into. |
| -s --captured-swapchain | Optional | Use the swapchain as it was captured during toCpp replay instead of using the "Virtual Swapchain" path. |
-| -t --target | Optional | Type of target platform to generate the Vulkan source. Available Platforms: android, xcb |
+| -t --target | Optional | Type of target platform to generate the Vulkan source. Available Platforms: android, xcb, wayland |
+| -v
--version | Optional | Print version information and exit. | ## Generate Source From a Capture @@ -119,26 +119,27 @@ adb install ./app/build/outputs/apk/debug/app-debug.apk --- -### Linux (XCB) +### Linux ```sh # In the root of GFXReconstruct # Where 'build' in this case is the build folder used to generate the source -./build/tools/tocpp/gfxrecon-tocpp -t xcb -o out_xcb_capture ./capture.gfxr +./build/tools/tocpp/gfxrecon-tocpp -t xcb -o out_capture ./capture.gfxr ``` -* `-t xcb` indicates that we generate for XCB platform. -* `-o out_xcb_capture` specify the directory where the generated files will be +* `-t xcb` indicates that we generate for XCB platform. Alternatively, you could use `-t wayland` to generate sources for + Wayland. +* `-o out_capture` specify the directory where the generated files will be placed. -At the end the output directory (`out_xcb_capture`) contains the generated +At the end the output directory (`out_capture`) contains the generated `capture.cpp` and the saved image data (`*.bin`). -### Build the source for desktop (XCB) +#### Build the source for desktop ```sh #cd -cd out_xcb_capture +cd out_capture # Generate build contents cmake -H. -Bbuild @@ -147,10 +148,10 @@ cmake -H. -Bbuild cmake --build build ``` -### Run the source for desktop (XCB) +#### Run the source for desktop ```sh -# Inside the output directory (`out_xcb_capture`) +# Inside the output directory (`out_capture`) ./build/vulkan_app ``` From 62202d8cddf611483eda3d3f214c77830afc8166 Mon Sep 17 00:00:00 2001 From: ziga-lunarg <87310389+ziga-lunarg@users.noreply.github.com> Date: Thu, 7 Nov 2024 23:10:42 +0100 Subject: [PATCH 70/70] Combine common code from vulkan capture manager and vulkan state writer (#1841) * Combine common code from vulkan capture manager and vulkan state writer --- android/framework/encode/CMakeLists.txt | 2 + framework/encode/CMakeLists.txt | 2 + framework/encode/vulkan_capture_common.cpp | 260 +++++++++++++++++++ framework/encode/vulkan_capture_common.h | 53 ++++ framework/encode/vulkan_capture_manager.cpp | 272 +++----------------- framework/encode/vulkan_capture_manager.h | 5 +- framework/encode/vulkan_state_writer.cpp | 169 +----------- framework/encode/vulkan_state_writer.h | 13 +- 8 files changed, 375 insertions(+), 401 deletions(-) create mode 100644 framework/encode/vulkan_capture_common.cpp create mode 100644 framework/encode/vulkan_capture_common.h diff --git a/android/framework/encode/CMakeLists.txt b/android/framework/encode/CMakeLists.txt index 79a48ca260..935bbfde6a 100644 --- a/android/framework/encode/CMakeLists.txt +++ b/android/framework/encode/CMakeLists.txt @@ -38,6 +38,8 @@ target_sources(gfxrecon_encode ${GFXRECON_SOURCE_DIR}/framework/encode/vulkan_state_writer.h ${GFXRECON_SOURCE_DIR}/framework/encode/vulkan_state_writer.cpp ${GFXRECON_SOURCE_DIR}/framework/encode/vulkan_track_struct.h + ${GFXRECON_SOURCE_DIR}/framework/encode/vulkan_capture_common.cpp + ${GFXRECON_SOURCE_DIR}/framework/encode/vulkan_capture_common.h ${GFXRECON_SOURCE_DIR}/framework/generated/generated_encode_pnext_struct.cpp ${GFXRECON_SOURCE_DIR}/framework/generated/generated_vulkan_api_call_encoders.h ${GFXRECON_SOURCE_DIR}/framework/generated/generated_vulkan_api_call_encoders.cpp diff --git a/framework/encode/CMakeLists.txt b/framework/encode/CMakeLists.txt index e540117155..c8ab2a72b0 100644 --- a/framework/encode/CMakeLists.txt +++ b/framework/encode/CMakeLists.txt @@ -93,6 +93,8 @@ target_sources(gfxrecon_encode ${CMAKE_CURRENT_LIST_DIR}/struct_pointer_encoder.h 
${CMAKE_CURRENT_LIST_DIR}/vulkan_capture_manager.h ${CMAKE_CURRENT_LIST_DIR}/vulkan_capture_manager.cpp + ${CMAKE_CURRENT_LIST_DIR}/vulkan_capture_common.h + ${CMAKE_CURRENT_LIST_DIR}/vulkan_capture_common.cpp ${CMAKE_CURRENT_LIST_DIR}/vulkan_device_address_tracker.h ${CMAKE_CURRENT_LIST_DIR}/vulkan_device_address_tracker.cpp ${CMAKE_CURRENT_LIST_DIR}/vulkan_handle_wrappers.h diff --git a/framework/encode/vulkan_capture_common.cpp b/framework/encode/vulkan_capture_common.cpp new file mode 100644 index 0000000000..b012a22b07 --- /dev/null +++ b/framework/encode/vulkan_capture_common.cpp @@ -0,0 +1,260 @@ +/* + ** Copyright (c) 2024 LunarG, Inc. + ** + ** Permission is hereby granted, free of charge, to any person obtaining a + ** copy of this software and associated documentation files (the "Software"), + ** to deal in the Software without restriction, including without limitation + ** the rights to use, copy, modify, merge, publish, distribute, sublicense, + ** and/or sell copies of the Software, and to permit persons to whom the + ** Software is furnished to do so, subject to the following conditions: + ** + ** The above copyright notice and this permission notice shall be included in + ** all copies or substantial portions of the Software. + ** + ** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + ** IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + ** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + ** AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + ** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + ** FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + ** DEALINGS IN THE SOFTWARE. + */ + +#include "vulkan_capture_common.h" + +#if defined(VK_USE_PLATFORM_ANDROID_KHR) +#include +#endif + +GFXRECON_BEGIN_NAMESPACE(gfxrecon) +GFXRECON_BEGIN_NAMESPACE(encode) + +void CommonWriteCreateHardwareBufferCmd(format::ThreadId thread_id, + format::HandleId memory_id, + AHardwareBuffer* hardware_buffer, + const std::vector& plane_info, + VulkanCaptureManager* vulkan_capture_manager, + VulkanStateWriter* vulkan_state_writer) +{ +#if defined(VK_USE_PLATFORM_ANDROID_KHR) + if (vulkan_capture_manager && !vulkan_capture_manager->IsCaptureModeWrite()) + { + return; + } + assert(hardware_buffer != nullptr); + + format::CreateHardwareBufferCommandHeader create_buffer_cmd; + + create_buffer_cmd.meta_header.block_header.type = format::BlockType::kMetaDataBlock; + create_buffer_cmd.meta_header.block_header.size = format::GetMetaDataBlockBaseSize(create_buffer_cmd); + create_buffer_cmd.meta_header.meta_data_id = format::MakeMetaDataId( + format::ApiFamilyId::ApiFamily_Vulkan, format::MetaDataType::kCreateHardwareBufferCommand); + create_buffer_cmd.thread_id = thread_id; + create_buffer_cmd.memory_id = memory_id; + create_buffer_cmd.buffer_id = reinterpret_cast(hardware_buffer); + + // Get AHB description data. 
+ AHardwareBuffer_Desc ahb_desc = {}; + AHardwareBuffer_describe(hardware_buffer, &ahb_desc); + + create_buffer_cmd.format = ahb_desc.format; + create_buffer_cmd.width = ahb_desc.width; + create_buffer_cmd.height = ahb_desc.height; + create_buffer_cmd.stride = ahb_desc.stride; + create_buffer_cmd.usage = ahb_desc.usage; + create_buffer_cmd.layers = ahb_desc.layers; + + size_t planes_size = 0; + + if (plane_info.empty()) + { + create_buffer_cmd.planes = 0; + } + else + { + create_buffer_cmd.planes = static_cast(plane_info.size()); + // Update size of packet with compressed or uncompressed data size. + planes_size = sizeof(plane_info[0]) * plane_info.size(); + create_buffer_cmd.meta_header.block_header.size += planes_size; + } + + if (vulkan_capture_manager) + { + if (planes_size > 0) + { + vulkan_capture_manager->CombineAndWriteToFile( + { { &create_buffer_cmd, sizeof(create_buffer_cmd) }, { plane_info.data(), planes_size } }); + } + else + { + vulkan_capture_manager->WriteToFile(&create_buffer_cmd, sizeof(create_buffer_cmd)); + } + } + else if (vulkan_state_writer) + { + vulkan_state_writer->OutputStreamWrite(&create_buffer_cmd, sizeof(create_buffer_cmd)); + + if (planes_size > 0) + { + vulkan_state_writer->OutputStreamWrite(plane_info.data(), planes_size); + } + } +#else + GFXRECON_UNREFERENCED_PARAMETER(thread_id); + GFXRECON_UNREFERENCED_PARAMETER(memory_id); + GFXRECON_UNREFERENCED_PARAMETER(hardware_buffer); + GFXRECON_UNREFERENCED_PARAMETER(plane_info); + GFXRECON_UNREFERENCED_PARAMETER(vulkan_capture_manager); + GFXRECON_UNREFERENCED_PARAMETER(vulkan_state_writer); + + GFXRECON_LOG_ERROR("Skipping create AHardwareBuffer command write for unsupported platform"); +#endif +} + +static void CommonWriteFillMemoryCmd(format::HandleId memory_id, + uint64_t size, + const void* data, + VulkanCaptureManager* vulkan_capture_manager, + VulkanStateWriter* vulkan_state_writer) +{ + if (vulkan_capture_manager) + { + vulkan_capture_manager->WriteFillMemoryCmd(memory_id, 0u, size, data); + } + else + { + vulkan_state_writer->WriteFillMemoryCmd(memory_id, 0u, size, data); + } +} + +void CommonProcessHardwareBuffer(format::ThreadId thread_id, + format::HandleId memory_id, + AHardwareBuffer* hardware_buffer, + size_t allocation_size, + VulkanCaptureManager* vulkan_capture_manager, + VulkanStateWriter* vulkan_state_writer) +{ +#if defined(VK_USE_PLATFORM_ANDROID_KHR) + assert(hardware_buffer != nullptr); + + // If this is the first device memory object to reference the hardware buffer, write a buffer creation + // command to the capture file and setup memory tracking. + + std::vector plane_info; + + AHardwareBuffer_Desc desc; + AHardwareBuffer_describe(hardware_buffer, &desc); + + if ((desc.usage & AHARDWAREBUFFER_USAGE_CPU_READ_MASK) != 0) + { + void* data = nullptr; + int result = -1; + + // The multi-plane functions are declared for API 26, but are only available to link with API 29. So, this + // could be turned into a run-time check dependent on dlsym returning a valid pointer for + // AHardwareBuffer_lockPlanes. 
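+    // Illustrative sketch of such a run-time probe, assuming <dlfcn.h>/RTLD_DEFAULT are available and
+    // using the NDK signature of AHardwareBuffer_lockPlanes (the alias name below is hypothetical):
+    //
+    //   AHardwareBuffer_Planes ahb_planes;
+    //   using LockPlanesFn = int (*)(AHardwareBuffer*, uint64_t, int32_t, const ARect*, AHardwareBuffer_Planes*);
+    //   auto lock_planes = reinterpret_cast<LockPlanesFn>(dlsym(RTLD_DEFAULT, "AHardwareBuffer_lockPlanes"));
+    //   if (lock_planes != nullptr)
+    //   {
+    //       result = lock_planes(hardware_buffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, -1, nullptr, &ahb_planes);
+    //   }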
+#if __ANDROID_API__ >= 29 + AHardwareBuffer_Planes ahb_planes; + result = + AHardwareBuffer_lockPlanes(hardware_buffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, -1, nullptr, &ahb_planes); + if (result == 0) + { + data = ahb_planes.planes[0].data; + + for (uint32_t i = 0; i < ahb_planes.planeCount; ++i) + { + format::HardwareBufferPlaneInfo ahb_plane_info; + ahb_plane_info.offset = + reinterpret_cast(ahb_planes.planes[i].data) - reinterpret_cast(data); + ahb_plane_info.pixel_stride = ahb_planes.planes[i].pixelStride; + ahb_plane_info.row_pitch = ahb_planes.planes[i].rowStride; + plane_info.emplace_back(std::move(ahb_plane_info)); + } + } + else + { + GFXRECON_LOG_WARNING("AHardwareBuffer_lockPlanes failed: AHardwareBuffer_lock will be used instead"); + } +#endif + + // Write CreateHardwareBufferCmd with or without the AHB payload + CommonWriteCreateHardwareBufferCmd( + thread_id, memory_id, hardware_buffer, plane_info, vulkan_capture_manager, vulkan_state_writer); + + // If AHardwareBuffer_lockPlanes failed (or is not available) try AHardwareBuffer_lock + if (result != 0) + { + result = AHardwareBuffer_lock(hardware_buffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, -1, nullptr, &data); + } + + if (result == 0 && data != nullptr) + { + CommonWriteFillMemoryCmd(memory_id, allocation_size, data, vulkan_capture_manager, vulkan_state_writer); + + if (vulkan_capture_manager) + { + // Track the memory with the PageGuardManager + const auto tracking_mode = vulkan_capture_manager->GetMemoryTrackingMode(); + if ((tracking_mode == CaptureSettings::MemoryTrackingMode::kPageGuard || + tracking_mode == CaptureSettings::MemoryTrackingMode::kUserfaultfd) && + vulkan_capture_manager->GetPageGuardTrackAhbMemory()) + { + GFXRECON_CHECK_CONVERSION_DATA_LOSS(size_t, allocation_size); + + util::PageGuardManager* manager = util::PageGuardManager::Get(); + assert(manager != nullptr); + + manager->AddTrackedMemory(memory_id, + data, + 0, + static_cast(allocation_size), + util::PageGuardManager::kNullShadowHandle, + false, // No shadow memory for the imported AHB memory. + false); // Write watch is not supported for this case. + } + } + + result = AHardwareBuffer_unlock(hardware_buffer, nullptr); + if (result != 0) + { + GFXRECON_LOG_ERROR("AHardwareBuffer_unlock failed"); + } + } + else + { + GFXRECON_LOG_ERROR( + "AHardwareBuffer_lock failed: hardware buffer data will be omitted from the capture file"); + + // Dump zeros for AHB payload. + std::vector zeros(allocation_size, 0); + CommonWriteFillMemoryCmd( + memory_id, zeros.size(), zeros.data(), vulkan_capture_manager, vulkan_state_writer); + } + } + else + { + // The AHB is not CPU-readable + + // Write CreateHardwareBufferCmd without the AHB payload + CommonWriteCreateHardwareBufferCmd( + thread_id, memory_id, hardware_buffer, plane_info, vulkan_capture_manager, vulkan_state_writer); + + // Dump zeros for AHB payload. 
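+        // Writing a zero-filled payload of allocation_size bytes keeps the capture file
+        // self-consistent for this memory object even though the actual contents cannot
+        // be read back.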
+        std::vector<uint8_t> zeros(allocation_size, 0);
+        CommonWriteFillMemoryCmd(memory_id, zeros.size(), zeros.data(), vulkan_capture_manager, vulkan_state_writer);
+
+        GFXRECON_LOG_WARNING("AHardwareBuffer cannot be read: hardware buffer data will be omitted "
+                             "from the capture file");
+    }
+#else
+    GFXRECON_UNREFERENCED_PARAMETER(thread_id);
+    GFXRECON_UNREFERENCED_PARAMETER(memory_id);
+    GFXRECON_UNREFERENCED_PARAMETER(hardware_buffer);
+    GFXRECON_UNREFERENCED_PARAMETER(allocation_size);
+    GFXRECON_UNREFERENCED_PARAMETER(vulkan_capture_manager);
+    GFXRECON_UNREFERENCED_PARAMETER(vulkan_state_writer);
+#endif
+}
+
+GFXRECON_END_NAMESPACE(encode)
+GFXRECON_END_NAMESPACE(gfxrecon)
diff --git a/framework/encode/vulkan_capture_common.h b/framework/encode/vulkan_capture_common.h
new file mode 100644
index 0000000000..76bc5a0c2a
--- /dev/null
+++ b/framework/encode/vulkan_capture_common.h
@@ -0,0 +1,53 @@
+/*
+ ** Copyright (c) 2024 LunarG, Inc.
+ **
+ ** Permission is hereby granted, free of charge, to any person obtaining a
+ ** copy of this software and associated documentation files (the "Software"),
+ ** to deal in the Software without restriction, including without limitation
+ ** the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ ** and/or sell copies of the Software, and to permit persons to whom the
+ ** Software is furnished to do so, subject to the following conditions:
+ **
+ ** The above copyright notice and this permission notice shall be included in
+ ** all copies or substantial portions of the Software.
+ **
+ ** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ ** IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ ** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ ** AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ ** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ ** FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ ** DEALINGS IN THE SOFTWARE.
+ */ + +#ifndef GFXRECON_ENCODE_VULKAN_CAPTURE_COMMON_H +#define GFXRECON_ENCODE_VULKAN_CAPTURE_COMMON_H + +#include "encode/parameter_encoder.h" +#include "vulkan/vulkan.h" +#include "format/format.h" +#include "format/platform_types.h" +#include "vulkan_capture_manager.h" +#include "vulkan_state_writer.h" + +GFXRECON_BEGIN_NAMESPACE(gfxrecon) +GFXRECON_BEGIN_NAMESPACE(encode) + +void CommonWriteCreateHardwareBufferCmd(format::ThreadId thread_id, + format::HandleId memory_id, + AHardwareBuffer* hardware_buffer, + const std::vector& plane_info, + VulkanCaptureManager* vulkan_capture_manager, + VulkanStateWriter* vulkan_state_writer); + +void CommonProcessHardwareBuffer(format::ThreadId thread_id, + format::HandleId memory_id, + AHardwareBuffer* hardware_buffer, + size_t allocation_size, + VulkanCaptureManager* vulkan_capture_manager, + VulkanStateWriter* vulkan_state_writer); + +GFXRECON_END_NAMESPACE(encode) +GFXRECON_END_NAMESPACE(gfxrecon) + +#endif // GFXRECON_ENCODE_VULKAN_CAPTURE_COMMON_H diff --git a/framework/encode/vulkan_capture_manager.cpp b/framework/encode/vulkan_capture_manager.cpp index 8be1a4e8c1..0b7f7ebe2c 100644 --- a/framework/encode/vulkan_capture_manager.cpp +++ b/framework/encode/vulkan_capture_manager.cpp @@ -33,6 +33,7 @@ #include "encode/vulkan_handle_wrapper_util.h" #include "encode/vulkan_state_writer.h" +#include "encode/vulkan_capture_common.h" #include "format/format_util.h" #include "generated/generated_vulkan_struct_handle_wrappers.h" #include "graphics/vulkan_check_buffer_references.h" @@ -219,74 +220,6 @@ void VulkanCaptureManager::WriteResizeWindowCmd2(format::HandleId s } } -void VulkanCaptureManager::WriteCreateHardwareBufferCmd(format::HandleId memory_id, - AHardwareBuffer* buffer, - const std::vector& plane_info) -{ - if (IsCaptureModeWrite()) - { -#if defined(VK_USE_PLATFORM_ANDROID_KHR) - assert(buffer != nullptr); - - format::CreateHardwareBufferCommandHeader create_buffer_cmd; - - auto thread_data = GetThreadData(); - assert(thread_data != nullptr); - - create_buffer_cmd.meta_header.block_header.type = format::BlockType::kMetaDataBlock; - create_buffer_cmd.meta_header.block_header.size = format::GetMetaDataBlockBaseSize(create_buffer_cmd); - create_buffer_cmd.meta_header.meta_data_id = format::MakeMetaDataId( - format::ApiFamilyId::ApiFamily_Vulkan, format::MetaDataType::kCreateHardwareBufferCommand); - create_buffer_cmd.thread_id = thread_data->thread_id_; - create_buffer_cmd.memory_id = memory_id; - create_buffer_cmd.buffer_id = reinterpret_cast(buffer); - - // Get AHB description data. - AHardwareBuffer_Desc ahb_desc = {}; - AHardwareBuffer_describe(buffer, &ahb_desc); - - create_buffer_cmd.format = ahb_desc.format; - create_buffer_cmd.width = ahb_desc.width; - create_buffer_cmd.height = ahb_desc.height; - create_buffer_cmd.stride = ahb_desc.stride; - create_buffer_cmd.usage = ahb_desc.usage; - create_buffer_cmd.layers = ahb_desc.layers; - - size_t planes_size = 0; - - if (plane_info.empty()) - { - create_buffer_cmd.planes = 0; - } - else - { - create_buffer_cmd.planes = static_cast(plane_info.size()); - // Update size of packet with size of plane info. 
- planes_size = sizeof(plane_info[0]) * plane_info.size(); - create_buffer_cmd.meta_header.block_header.size += planes_size; - } - - { - if (planes_size > 0) - { - CombineAndWriteToFile( - { { &create_buffer_cmd, sizeof(create_buffer_cmd) }, { plane_info.data(), planes_size } }); - } - else - { - WriteToFile(&create_buffer_cmd, sizeof(create_buffer_cmd)); - } - } -#else - GFXRECON_UNREFERENCED_PARAMETER(memory_id); - GFXRECON_UNREFERENCED_PARAMETER(buffer); - GFXRECON_UNREFERENCED_PARAMETER(plane_info); - - GFXRECON_LOG_ERROR("Skipping create AHardwareBuffer command write for unsupported platform"); -#endif - } -} - void VulkanCaptureManager::WriteDestroyHardwareBufferCmd(AHardwareBuffer* buffer) { if (IsCaptureModeWrite()) @@ -1722,179 +1655,51 @@ VkMemoryPropertyFlags VulkanCaptureManager::GetMemoryProperties(vulkan_wrappers: return memory_properties->memoryTypes[memory_type_index].propertyFlags; } -void VulkanCaptureManager::ProcessReferenceToAndroidHardwareBuffer(VkDevice device, AHardwareBuffer* hardware_buffer) +void VulkanCaptureManager::ProcessHardwareBuffer(format::ThreadId thread_id, + AHardwareBuffer* hardware_buffer, + VkDevice device) { #if defined(VK_USE_PLATFORM_ANDROID_KHR) - assert(hardware_buffer != nullptr); - auto device_wrapper = vulkan_wrappers::GetWrapper(device); - VkDevice device_unwrapped = device_wrapper->handle; - auto device_table = vulkan_wrappers::GetDeviceTable(device); - auto entry = hardware_buffers_.find(hardware_buffer); - if (entry == hardware_buffers_.end()) + if (entry != hardware_buffers_.end()) { - // If this is the first device memory object to reference the hardware buffer, write a buffer creation - // command to the capture file and setup memory tracking. - - std::vector plane_info; - - AHardwareBuffer_Desc desc; - AHardwareBuffer_describe(hardware_buffer, &desc); - - if ((desc.usage & AHARDWAREBUFFER_USAGE_CPU_READ_MASK) != 0) - { - void* data = nullptr; - int result = -1; - - // The multi-plane functions are declared for API 26, but are only available to link with API 29. So, this - // could be turned into a run-time check dependent on dlsym returning a valid pointer for - // AHardwareBuffer_lockPlanes. -#if __ANDROID_API__ >= 29 - AHardwareBuffer_Planes ahb_planes; - result = AHardwareBuffer_lockPlanes( - hardware_buffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, -1, nullptr, &ahb_planes); - if (result == 0) - { - data = ahb_planes.planes[0].data; - - for (uint32_t i = 0; i < ahb_planes.planeCount; ++i) - { - format::HardwareBufferPlaneInfo ahb_plane_info; - ahb_plane_info.offset = - reinterpret_cast(ahb_planes.planes[i].data) - reinterpret_cast(data); - ahb_plane_info.pixel_stride = ahb_planes.planes[i].pixelStride; - ahb_plane_info.row_pitch = ahb_planes.planes[i].rowStride; - plane_info.emplace_back(std::move(ahb_plane_info)); - } - } - else - { - GFXRECON_LOG_WARNING("AHardwareBuffer_lockPlanes failed: AHardwareBuffer_lock will be used instead"); - } -#endif - - // Only store buffer IDs and reference count if a creation command is written to the capture file. 
- format::HandleId memory_id = GetUniqueId(); - - HardwareBufferInfo& ahb_info = hardware_buffers_[hardware_buffer]; - ahb_info.memory_id = memory_id; - ahb_info.reference_count = 0; - - // Write CreateHardwareBufferCmd with or without the AHB payload - WriteCreateHardwareBufferCmd(memory_id, hardware_buffer, plane_info); - - // Query the AHB size - VkAndroidHardwareBufferPropertiesANDROID properties = { - VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID - }; - properties.pNext = nullptr; - - VkResult vk_result = - device_table->GetAndroidHardwareBufferPropertiesANDROID(device_unwrapped, hardware_buffer, &properties); - - if (vk_result == VK_SUCCESS) - { - const size_t ahb_size = properties.allocationSize; - assert(ahb_size); - - // If AHardwareBuffer_lockPlanes() failed (or is not available) try AHardwareBuffer_lock() - if (result != 0) - { - result = - AHardwareBuffer_lock(hardware_buffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, -1, nullptr, &data); - } - - if (result == 0 && data != nullptr) - { - WriteFillMemoryCmd(memory_id, 0, ahb_size, data); - - // Track the memory with the PageGuardManager - if ((GetMemoryTrackingMode() == CaptureSettings::MemoryTrackingMode::kPageGuard || - GetMemoryTrackingMode() == CaptureSettings::MemoryTrackingMode::kUserfaultfd) && - GetPageGuardTrackAhbMemory()) - { - GFXRECON_CHECK_CONVERSION_DATA_LOSS(size_t, ahb_size); - - util::PageGuardManager* manager = util::PageGuardManager::Get(); - assert(manager != nullptr); - - manager->AddTrackedMemory(memory_id, - data, - 0, - static_cast(ahb_size), - util::PageGuardManager::kNullShadowHandle, - false, // No shadow memory for the imported AHB memory. - false); // Write watch is not supported for this case. - } - - result = AHardwareBuffer_unlock(hardware_buffer, nullptr); - if (result != 0) - { - GFXRECON_LOG_ERROR("AHardwareBuffer_unlock failed"); - } - } - else - { - GFXRECON_LOG_ERROR( - "AHardwareBuffer_lock failed: hardware buffer data will be omitted from the capture file"); - - // Dump zeros for AHB payload. - std::vector zeros(ahb_size, 0); - WriteFillMemoryCmd(memory_id, 0, ahb_size, zeros.data()); - } - } - else - { - GFXRECON_LOG_ERROR( - "GetAndroidHardwareBufferPropertiesANDROID failed: hardware buffer data will be omitted " - "from the capture file"); - - // In case AHardwareBuffer_lockPlanes() succeeded - if (result == 0) - { - result = AHardwareBuffer_unlock(hardware_buffer, nullptr); - if (result != 0) - { - GFXRECON_LOG_ERROR("AHardwareBuffer_unlock failed"); - } - } - } - } - else - { - // The AHB is not CPU-readable - // Only store buffer IDs and reference count if a creation command is written to the capture file. 
- format::HandleId memory_id = GetUniqueId(); - - HardwareBufferInfo& ahb_info = hardware_buffers_[hardware_buffer]; - ahb_info.memory_id = memory_id; - ahb_info.reference_count = 0; + return; + } - WriteCreateHardwareBufferCmd(memory_id, hardware_buffer, plane_info); + format::HandleId memory_id = GetUniqueId(); - GFXRECON_LOG_WARNING("AHardwareBuffer cannot be read: hardware buffer data will be omitted " - "from the capture file"); + HardwareBufferInfo& ahb_info = hardware_buffers_[hardware_buffer]; + ahb_info.memory_id = memory_id; + ahb_info.reference_count = 0; - VkAndroidHardwareBufferPropertiesANDROID properties = { - VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID - }; - properties.pNext = nullptr; + auto device_wrapper = vulkan_wrappers::GetWrapper(device); + VkDevice device_unwrapped = device_wrapper->handle; + auto device_table = vulkan_wrappers::GetDeviceTable(device); - VkResult vk_result = - device_table->GetAndroidHardwareBufferPropertiesANDROID(device_unwrapped, hardware_buffer, &properties); + // Query the AHB size + VkAndroidHardwareBufferPropertiesANDROID properties = { + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID + }; + properties.pNext = nullptr; - if (vk_result == VK_SUCCESS) - { - const size_t ahb_size = properties.allocationSize; + VkResult vk_result = + device_table->GetAndroidHardwareBufferPropertiesANDROID(device_unwrapped, hardware_buffer, &properties); - // Dump zeros for AHB payload. - std::vector zeros(ahb_size, 0); - WriteFillMemoryCmd(memory_id, 0, zeros.size(), zeros.data()); - } - } + if (vk_result == VK_SUCCESS) + { + const size_t ahb_size = properties.allocationSize; + assert(ahb_size); + CommonProcessHardwareBuffer(thread_id, memory_id, hardware_buffer, ahb_size, this, nullptr); + } + else + { + GFXRECON_LOG_ERROR("GetAndroidHardwareBufferPropertiesANDROID failed: hardware buffer data will be omitted " + "from the capture file"); } #else + GFXRECON_UNREFERENCED_PARAMETER(thread_id); GFXRECON_UNREFERENCED_PARAMETER(hardware_buffer); + GFXRECON_UNREFERENCED_PARAMETER(device); #endif } @@ -1908,7 +1713,10 @@ void VulkanCaptureManager::ProcessImportAndroidHardwareBuffer(VkDevice d auto memory_wrapper = vulkan_wrappers::GetWrapper(memory); assert((memory_wrapper != nullptr) && (hardware_buffer != nullptr)); - ProcessReferenceToAndroidHardwareBuffer(device, hardware_buffer); + auto thread_data = GetThreadData(); + assert(thread_data != nullptr); + + ProcessHardwareBuffer(thread_data->thread_id_, hardware_buffer, device); auto entry = hardware_buffers_.find(hardware_buffer); GFXRECON_ASSERT(entry != hardware_buffers_.end()); @@ -2701,9 +2509,13 @@ void VulkanCaptureManager::PreProcess_vkGetAndroidHardwareBufferPropertiesANDROI GFXRECON_UNREFERENCED_PARAMETER(pProperties); #if defined(VK_USE_PLATFORM_ANDROID_KHR) auto device_wrapper = vulkan_wrappers::GetWrapper(device); + + auto thread_data = GetThreadData(); + assert(thread_data != nullptr); + if (hardware_buffer != nullptr) { - ProcessReferenceToAndroidHardwareBuffer(device, const_cast(hardware_buffer)); + ProcessHardwareBuffer(thread_data->thread_id_, const_cast(hardware_buffer), device); } #else GFXRECON_UNREFERENCED_PARAMETER(device); diff --git a/framework/encode/vulkan_capture_manager.h b/framework/encode/vulkan_capture_manager.h index c5cd102114..02cbae96d0 100644 --- a/framework/encode/vulkan_capture_manager.h +++ b/framework/encode/vulkan_capture_manager.h @@ -1625,9 +1625,6 @@ class VulkanCaptureManager : public ApiCaptureManager uint32_t width, uint32_t height, 
VkSurfaceTransformFlagBitsKHR pre_transform); - void WriteCreateHardwareBufferCmd(format::HandleId memory_id, - AHardwareBuffer* buffer, - const std::vector& plane_info); void WriteDestroyHardwareBufferCmd(AHardwareBuffer* buffer); void WriteSetDevicePropertiesCommand(format::HandleId physical_device_id, const VkPhysicalDeviceProperties& properties); @@ -1653,7 +1650,7 @@ class VulkanCaptureManager : public ApiCaptureManager VkMemoryPropertyFlags GetMemoryProperties(vulkan_wrappers::DeviceWrapper* device_wrapper, uint32_t memory_type_index); - void ProcessReferenceToAndroidHardwareBuffer(VkDevice device, AHardwareBuffer* hardware_buffer); + void ProcessHardwareBuffer(format::ThreadId thread_id, AHardwareBuffer* hardware_buffer, VkDevice device); void ProcessImportAndroidHardwareBuffer(VkDevice device, VkDeviceMemory memory, AHardwareBuffer* hardware_buffer); void ReleaseAndroidHardwareBuffer(AHardwareBuffer* hardware_buffer); bool CheckBindAlignment(VkDeviceSize memoryOffset); diff --git a/framework/encode/vulkan_state_writer.cpp b/framework/encode/vulkan_state_writer.cpp index 66d813bba5..b8bd96d9a0 100644 --- a/framework/encode/vulkan_state_writer.cpp +++ b/framework/encode/vulkan_state_writer.cpp @@ -26,6 +26,7 @@ #include "encode/struct_pointer_encoder.h" #include "encode/vulkan_handle_wrappers.h" #include "encode/vulkan_state_info.h" +#include "encode/vulkan_capture_common.h" #include "format/format.h" #include "format/format_util.h" #include "util/logging.h" @@ -1346,7 +1347,14 @@ void VulkanStateWriter::WriteDeviceMemoryState(const VulkanStateTable& state_tab for (auto hardware_buffer : hardware_buffers) { const vulkan_wrappers::DeviceMemoryWrapper* wrapper = hardware_buffer.second; - ProcessHardwareBuffer(wrapper->hardware_buffer_memory_id, wrapper->hardware_buffer, wrapper->allocation_size); + CommonProcessHardwareBuffer(thread_id_, + wrapper->hardware_buffer_memory_id, + wrapper->hardware_buffer, + wrapper->allocation_size, + nullptr, + this); + + ++blocks_written_; } #endif @@ -1971,105 +1979,9 @@ void VulkanStateWriter::WriteDeferredOperationJoinCommand(format::HandleId devic parameter_stream_.Clear(); } -void VulkanStateWriter::ProcessHardwareBuffer(format::HandleId memory_id, - AHardwareBuffer* hardware_buffer, - VkDeviceSize allocation_size) +bool VulkanStateWriter::OutputStreamWrite(const void* data, size_t len) { -#if defined(VK_USE_PLATFORM_ANDROID_KHR) - void* data = nullptr; - int result = -1; - - std::vector plane_info; - - AHardwareBuffer_Desc desc; - AHardwareBuffer_describe(hardware_buffer, &desc); - - if ((desc.usage & AHARDWAREBUFFER_USAGE_CPU_READ_MASK) != 0) - { - // The multi-plane functions are declared for API 26, but are only available to link with API 29. So, this - // could be turned into a run-time check dependent on dlsym returning a valid pointer for - // AHardwareBuffer_lockPlanes. 
-#if __ANDROID_API__ >= 29 - AHardwareBuffer_Planes ahb_planes; - result = - AHardwareBuffer_lockPlanes(hardware_buffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, -1, nullptr, &ahb_planes); - if (result == 0) - { - data = ahb_planes.planes[0].data; - - for (uint32_t i = 0; i < ahb_planes.planeCount; ++i) - { - format::HardwareBufferPlaneInfo ahb_plane_info; - ahb_plane_info.offset = - reinterpret_cast(ahb_planes.planes[i].data) - reinterpret_cast(data); - ahb_plane_info.pixel_stride = ahb_planes.planes[i].pixelStride; - ahb_plane_info.row_pitch = ahb_planes.planes[i].rowStride; - plane_info.emplace_back(std::move(ahb_plane_info)); - } - } - else - { - GFXRECON_LOG_WARNING("AHardwareBuffer_lockPlanes failed: AHardwareBuffer_lock will be used instead"); - } -#endif - - // Write CreateHardwareBufferCmd with or without the AHB payload - WriteCreateHardwareBufferCmd(memory_id, hardware_buffer, plane_info); - - // If AHardwareBuffer_lockPlanes failed (or is not available) try AHardwareBuffer_lock - if (result != 0) - { - result = AHardwareBuffer_lock(hardware_buffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, -1, nullptr, &data); - } - - if (result == 0) - { - if (data == nullptr) - { - GFXRECON_LOG_WARNING("AHardwareBuffer_lock returned nullptr for data pointer"); - - // Dump zeros for AHB payload. - std::vector zeros(allocation_size, 0); - WriteFillMemoryCmd(memory_id, 0, zeros.size(), zeros.data()); - } - else - { - WriteFillMemoryCmd(memory_id, 0, allocation_size, data); - } - - result = AHardwareBuffer_unlock(hardware_buffer, nullptr); - if (result != 0) - { - GFXRECON_LOG_ERROR("AHardwareBuffer_unlock failed"); - } - } - else - { - GFXRECON_LOG_ERROR( - "AHardwareBuffer_lock failed: hardware buffer data will be omitted from the capture file"); - - // Dump zeros for AHB payload. - std::vector zeros(allocation_size, 0); - WriteFillMemoryCmd(memory_id, 0, zeros.size(), zeros.data()); - } - } - else - { - // Write CreateHardwareBufferCmd without the AHB payload - WriteCreateHardwareBufferCmd(memory_id, hardware_buffer, plane_info); - - // The AHB is not CPU-readable - // Dump zeros for AHB payload. - std::vector zeros(allocation_size, 0); - WriteFillMemoryCmd(memory_id, 0, zeros.size(), zeros.data()); - - GFXRECON_LOG_WARNING("AHardwareBuffer cannot be read: hardware buffer data will be omitted " - "from the capture file"); - } -#else - GFXRECON_UNREFERENCED_PARAMETER(memory_id); - GFXRECON_UNREFERENCED_PARAMETER(hardware_buffer); -#endif + return output_stream_->Write(data, len); } void VulkanStateWriter::ProcessBufferMemory(const vulkan_wrappers::DeviceWrapper* device_wrapper, @@ -3982,65 +3894,6 @@ void VulkanStateWriter::WriteResizeWindowCmd2(format::HandleId surf ++blocks_written_; } -// TODO: This is the same code used by CaptureManager to write command data. It could be moved to a format -// utility. 
-void VulkanStateWriter::WriteCreateHardwareBufferCmd(format::HandleId memory_id, - AHardwareBuffer* hardware_buffer, - const std::vector& plane_info) -{ -#if defined(VK_USE_PLATFORM_ANDROID_KHR) - assert(hardware_buffer != nullptr); - - format::CreateHardwareBufferCommandHeader create_buffer_cmd; - - create_buffer_cmd.meta_header.block_header.type = format::BlockType::kMetaDataBlock; - create_buffer_cmd.meta_header.block_header.size = format::GetMetaDataBlockBaseSize(create_buffer_cmd); - create_buffer_cmd.meta_header.meta_data_id = format::MakeMetaDataId( - format::ApiFamilyId::ApiFamily_Vulkan, format::MetaDataType::kCreateHardwareBufferCommand); - create_buffer_cmd.thread_id = thread_id_; - create_buffer_cmd.memory_id = memory_id; - create_buffer_cmd.buffer_id = reinterpret_cast(hardware_buffer); - - // Get AHB description data. - AHardwareBuffer_Desc ahb_desc = {}; - AHardwareBuffer_describe(hardware_buffer, &ahb_desc); - - create_buffer_cmd.format = ahb_desc.format; - create_buffer_cmd.width = ahb_desc.width; - create_buffer_cmd.height = ahb_desc.height; - create_buffer_cmd.stride = ahb_desc.stride; - create_buffer_cmd.usage = ahb_desc.usage; - create_buffer_cmd.layers = ahb_desc.layers; - - size_t planes_size = 0; - - if (plane_info.empty()) - { - create_buffer_cmd.planes = 0; - } - else - { - create_buffer_cmd.planes = static_cast(plane_info.size()); - // Update size of packet with compressed or uncompressed data size. - planes_size = sizeof(plane_info[0]) * plane_info.size(); - create_buffer_cmd.meta_header.block_header.size += planes_size; - } - - output_stream_->Write(&create_buffer_cmd, sizeof(create_buffer_cmd)); - - if (planes_size > 0) - { - output_stream_->Write(plane_info.data(), planes_size); - } - - ++blocks_written_; -#else - GFXRECON_UNREFERENCED_PARAMETER(memory_id); - GFXRECON_UNREFERENCED_PARAMETER(hardware_buffer); - GFXRECON_UNREFERENCED_PARAMETER(plane_info); -#endif -} - void VulkanStateWriter::WriteSetDevicePropertiesCommand(format::HandleId physical_device_id, const VkPhysicalDeviceProperties& properties) { diff --git a/framework/encode/vulkan_state_writer.h b/framework/encode/vulkan_state_writer.h index 131f938182..3cd654a562 100644 --- a/framework/encode/vulkan_state_writer.h +++ b/framework/encode/vulkan_state_writer.h @@ -64,6 +64,10 @@ class VulkanStateWriter uint64_t WriteAssets(const VulkanStateTable& state_table); + bool OutputStreamWrite(const void* data, size_t len); + + void WriteFillMemoryCmd(format::HandleId memory_id, VkDeviceSize offset, VkDeviceSize size, const void* data); + private: // Data structures for processing resource memory snapshots. 
struct BufferSnapshotInfo @@ -160,9 +164,6 @@ class VulkanStateWriter void WriteDeferredOperationJoinCommand(format::HandleId device_id, format::HandleId deferred_operation_id); - void - ProcessHardwareBuffer(format::HandleId memory_id, AHardwareBuffer* hardware_buffer, VkDeviceSize allocation_size); - void ProcessBufferMemory(const vulkan_wrappers::DeviceWrapper* device_wrapper, const std::vector& buffer_snapshot_info, graphics::VulkanResourcesUtil& resource_util); @@ -300,8 +301,6 @@ class VulkanStateWriter util::MemoryOutputStream* parameter_buffer, util::FileOutputStream* output_stream = nullptr); - void WriteFillMemoryCmd(format::HandleId memory_id, VkDeviceSize offset, VkDeviceSize size, const void* data); - void WriteResizeWindowCmd(format::HandleId surface_id, uint32_t width, uint32_t height); void WriteResizeWindowCmd2(format::HandleId surface_id, @@ -309,10 +308,6 @@ class VulkanStateWriter uint32_t height, VkSurfaceTransformFlagBitsKHR pre_transform); - void WriteCreateHardwareBufferCmd(format::HandleId memory_id, - AHardwareBuffer* hardware_buffer, - const std::vector& plane_info); - void WriteSetDevicePropertiesCommand(format::HandleId physical_device_id, const VkPhysicalDeviceProperties& properties);