diff fuhtark_test/Vulkan-Headers-1.4.334/include/vulkan/vulkan_funcs.hpp @ 1501:f40d9d814c08 default tip main
did: correct vulkan-api generator
| author   | sam <sam@basx.dev>              |
|----------|---------------------------------|
| date     | Wed, 26 Nov 2025 23:34:29 +0700 |
| parents  |                                 |
| children |                                 |
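The file added below is the Vulkan-Hpp command-wrapper header generated from the Khronos Vulkan XML API Registry; each entry wraps one `vkXxx` entry point behind a dispatcher. As a quick orientation, here is a hedged usage sketch of the wrappers this diff introduces (a hypothetical example, not part of the changeset; it assumes the stock `vulkan.hpp` enhanced mode with exceptions enabled and the default statically linked dispatcher, and names such as `appInfo` are illustrative only):

```cpp
// Hypothetical usage of the generated wrappers (createInstanceUnique,
// enumeratePhysicalDevices). Assumes linking against the Vulkan loader and
// the default dispatcher; not part of this commit.
#include <vulkan/vulkan.hpp>

#include <iostream>
#include <vector>

int main()
{
  vk::ApplicationInfo    appInfo( "wrapper-demo", 1, "no-engine", 1, VK_API_VERSION_1_2 );
  vk::InstanceCreateInfo createInfo( {}, &appInfo );

  // Wraps vkCreateInstance; returns a RAII handle and throws vk::SystemError on failure.
  vk::UniqueInstance instance = vk::createInstanceUnique( createInfo );

  // Wraps the two-call vkEnumeratePhysicalDevices pattern visible in the diff body.
  std::vector<vk::PhysicalDevice> gpus = instance->enumeratePhysicalDevices();
  std::cout << gpus.size() << " physical device(s) found\n";
  return 0;
}
```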
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/fuhtark_test/Vulkan-Headers-1.4.334/include/vulkan/vulkan_funcs.hpp	Wed Nov 26 23:34:29 2025 +0700
@@ -0,0 +1,32143 @@
+// Copyright 2015-2025 The Khronos Group Inc.
+//
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_FUNCS_HPP
+#define VULKAN_FUNCS_HPP
+
+// include-what-you-use: make sure, vulkan.hpp is used by code-completers
+// IWYU pragma: private, include "vulkan/vulkan.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+
+  //===========================
+  //=== COMMAND Definitions ===
+  //===========================
+
+  //=== VK_VERSION_1_0 ===
+
+  // wrapper function for command vkCreateInstance, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateInstance.html
+  template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createInstance( const InstanceCreateInfo * pCreateInfo,
+                                                                const AllocationCallbacks * pAllocator,
+                                                                Instance * pInstance,
+                                                                Dispatch const & d ) VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( pCreateInfo ),
+                                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                                    reinterpret_cast<VkInstance *>( pInstance ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCreateInstance, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateInstance.html
+  template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<Instance>::type
+    createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d )
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCreateInstance && "Function <vkCreateInstance> requires <VK_VERSION_1_0>" );
+# endif
+
+    Instance instance;
+    Result result = static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ),
+                                                             reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ),
+                                                             reinterpret_cast<VkInstance *>( &instance ) ) );
+    detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstance" );
+
+    return detail::createResultValueType( result, std::move( instance ) );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // wrapper function for command vkCreateInstance, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateInstance.html
+  template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Instance, Dispatch>>::type
+    createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d )
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCreateInstance && "Function <vkCreateInstance> requires <VK_VERSION_1_0>" );
+# endif
+
+    Instance instance;
+    Result result = static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ),
+                                                             reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ),
+                                                             reinterpret_cast<VkInstance *>( &instance ) ) );
+    detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" );
+
+    return detail::createResultValueType( result,
+                                          UniqueHandle<Instance, Dispatch>( instance, detail::ObjectDestroy<detail::NoParent, Dispatch>( allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkDestroyInstance, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyInstance.html
+  template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type>
+  VULKAN_HPP_INLINE void Instance::destroy( const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyInstance( static_cast<VkInstance>( m_instance ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkDestroyInstance, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyInstance.html
+  template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type>
+  VULKAN_HPP_INLINE void Instance::destroy( Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkDestroyInstance && "Function <vkDestroyInstance> requires <VK_VERSION_1_0>" );
+# endif
+
+    d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkEnumeratePhysicalDevices, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDevices.html
+  template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount,
+                                                                                    PhysicalDevice * pPhysicalDevices,
+                                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkEnumeratePhysicalDevices( static_cast<VkInstance>( m_instance ), pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkEnumeratePhysicalDevices, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDevices.html
+  template <typename PhysicalDeviceAllocator,
+            typename Dispatch,
+            typename std::enable_if<std::is_same<typename PhysicalDeviceAllocator::value_type, PhysicalDevice>::value, int>::type,
+            typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type
+    Instance::enumeratePhysicalDevices( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDevices && "Function <vkEnumeratePhysicalDevices> requires <VK_VERSION_1_0>" );
+# endif
+
+    std::vector<PhysicalDevice, PhysicalDeviceAllocator> physicalDevices;
+    uint32_t physicalDeviceCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && physicalDeviceCount )
+      {
+        physicalDevices.resize( physicalDeviceCount );
+        result = static_cast<Result>(
+          d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
+    VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
+    if ( physicalDeviceCount < physicalDevices.size() )
+    {
+      physicalDevices.resize( physicalDeviceCount );
+    }
+    return detail::createResultValueType( result, std::move( physicalDevices ) );
+  }
+
+  // wrapper function for command vkEnumeratePhysicalDevices, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDevices.html
+  template <typename PhysicalDeviceAllocator,
+            typename Dispatch,
+            typename std::enable_if<std::is_same<typename PhysicalDeviceAllocator::value_type, PhysicalDevice>::value, int>::type,
+            typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type
+    Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDevices && "Function <vkEnumeratePhysicalDevices> requires <VK_VERSION_1_0>" );
+# endif
+
+    std::vector<PhysicalDevice, PhysicalDeviceAllocator> physicalDevices( physicalDeviceAllocator );
+    uint32_t physicalDeviceCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && physicalDeviceCount )
+      {
+        physicalDevices.resize( physicalDeviceCount );
+        result = static_cast<Result>(
+          d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
+    VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
+    if ( physicalDeviceCount < physicalDevices.size() )
+    {
+      physicalDevices.resize( physicalDeviceCount );
+    }
+    return detail::createResultValueType( result, std::move( physicalDevices ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkGetPhysicalDeviceFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures.html
+  template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( PhysicalDeviceFeatures * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceFeatures( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command
vkGetPhysicalDeviceFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PhysicalDeviceFeatures PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures && "Function <vkGetPhysicalDeviceFeatures> requires <VK_VERSION_1_0>" ); +# endif + + PhysicalDeviceFeatures features; + d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) ); + + return features; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + PhysicalDevice::getFormatProperties( Format format, FormatProperties * pFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceFormatProperties( + static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE FormatProperties PhysicalDevice::getFormatProperties( Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties && "Function <vkGetPhysicalDeviceFormatProperties> requires <VK_VERSION_1_0>" ); +# endif + + FormatProperties formatProperties; + d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) ); + + return formatProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( Format format, + ImageType type, + ImageTiling tiling, + ImageUsageFlags usage, + ImageCreateFlags flags, + ImageFormatProperties * pImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), + static_cast<VkFormat>( format ), + static_cast<VkImageType>( type ), + static_cast<VkImageTiling>( tiling ), + static_cast<VkImageUsageFlags>( usage 
), + static_cast<VkImageCreateFlags>( flags ), + reinterpret_cast<VkImageFormatProperties *>( pImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<ImageFormatProperties>::type PhysicalDevice::getImageFormatProperties( + Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties && "Function <vkGetPhysicalDeviceImageFormatProperties> requires <VK_VERSION_1_0>" ); +# endif + + ImageFormatProperties imageFormatProperties; + Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, + static_cast<VkFormat>( format ), + static_cast<VkImageType>( type ), + static_cast<VkImageTiling>( tiling ), + static_cast<VkImageUsageFlags>( usage ), + static_cast<VkImageCreateFlags>( flags ), + reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); + + return detail::createResultValueType( result, std::move( imageFormatProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getProperties( PhysicalDeviceProperties * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PhysicalDeviceProperties PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties && "Function <vkGetPhysicalDeviceProperties> requires <VK_VERSION_1_0>" ); +# endif + + PhysicalDeviceProperties properties; + d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) ); + + return properties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties.html + template <typename Dispatch, typename 
std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, + QueueFamilyProperties * pQueueFamilyProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceQueueFamilyProperties( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties.html + template <typename QueueFamilyPropertiesAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename QueueFamilyPropertiesAllocator::value_type, QueueFamilyProperties>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> + PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties && "Function <vkGetPhysicalDeviceQueueFamilyProperties> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties.html + template <typename QueueFamilyPropertiesAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename QueueFamilyPropertiesAllocator::value_type, QueueFamilyProperties>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> + PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties && "Function <vkGetPhysicalDeviceQueueFamilyProperties> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties( queueFamilyPropertiesAllocator ); + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + 
queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceMemoryProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( PhysicalDeviceMemoryProperties * pMemoryProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceMemoryProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceMemoryProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties && "Function <vkGetPhysicalDeviceMemoryProperties> requires <VK_VERSION_1_0>" ); +# endif + + PhysicalDeviceMemoryProperties memoryProperties; + d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) ); + + return memoryProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetInstanceProcAddr, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetInstanceProcAddr.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetInstanceProcAddr( static_cast<VkInstance>( m_instance ), pName ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetInstanceProcAddr, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetInstanceProcAddr.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE PFN_VoidFunction Instance::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetInstanceProcAddr && "Function <vkGetInstanceProcAddr> requires <VK_VERSION_1_0>" ); +# endif + + PFN_vkVoidFunction result = d.vkGetInstanceProcAddr( m_instance, 
name.c_str() ); + + return result; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceProcAddr, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceProcAddr.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetDeviceProcAddr( static_cast<VkDevice>( m_device ), pName ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceProcAddr, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceProcAddr.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE PFN_VoidFunction Device::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceProcAddr && "Function <vkGetDeviceProcAddr> requires <VK_VERSION_1_0>" ); +# endif + + PFN_vkVoidFunction result = d.vkGetDeviceProcAddr( m_device, name.c_str() ); + + return result; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateDevice, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDevice.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const DeviceCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + Device * pDevice, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateDevice( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkDevice *>( pDevice ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDevice, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDevice.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<Device>::type + PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDevice && "Function <vkCreateDevice> requires <VK_VERSION_1_0>" ); +# endif + + Device device; + Result result = static_cast<Result>( d.vkCreateDevice( m_physicalDevice, + reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkDevice *>( &device ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" ); + + return detail::createResultValueType( result, std::move( device ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDevice, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDevice.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Device, Dispatch>>::type + PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDevice && "Function <vkCreateDevice> requires <VK_VERSION_1_0>" ); +# endif + + Device device; + Result result = static_cast<Result>( d.vkCreateDevice( m_physicalDevice, + reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkDevice *>( &device ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" ); + + return detail::createResultValueType( result, UniqueHandle<Device, Dispatch>( device, detail::ObjectDestroy<detail::NoParent, Dispatch>( allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDevice, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDevice.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDevice( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDevice, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDevice.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDevice && "Function <vkDestroyDevice> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkEnumerateInstanceExtensionProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceExtensionProperties.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + ExtensionProperties * pProperties, + Dispatch const & d ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command 
vkEnumerateInstanceExtensionProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceExtensionProperties.html + template <typename ExtensionPropertiesAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename ExtensionPropertiesAllocator::value_type, ExtensionProperties>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type + enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateInstanceExtensionProperties && "Function <vkEnumerateInstanceExtensionProperties> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( + layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkEnumerateInstanceExtensionProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceExtensionProperties.html + template <typename ExtensionPropertiesAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename ExtensionPropertiesAllocator::value_type, ExtensionProperties>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type + enumerateInstanceExtensionProperties( Optional<const std::string> layerName, + ExtensionPropertiesAllocator & extensionPropertiesAllocator, + Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateInstanceExtensionProperties && "Function <vkEnumerateInstanceExtensionProperties> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( + layerName ? 
layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkEnumerateDeviceExtensionProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateDeviceExtensionProperties.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + ExtensionProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumerateDeviceExtensionProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateDeviceExtensionProperties.html + template <typename ExtensionPropertiesAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename ExtensionPropertiesAllocator::value_type, ExtensionProperties>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type + PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateDeviceExtensionProperties && "Function <vkEnumerateDeviceExtensionProperties> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties; + uint32_t propertyCount; + Result result; + do + { + result = + static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( + m_physicalDevice, layerName ? 
layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkEnumerateDeviceExtensionProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateDeviceExtensionProperties.html + template <typename ExtensionPropertiesAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename ExtensionPropertiesAllocator::value_type, ExtensionProperties>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type + PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, + ExtensionPropertiesAllocator & extensionPropertiesAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateDeviceExtensionProperties && "Function <vkEnumerateDeviceExtensionProperties> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = + static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( + m_physicalDevice, layerName ? 
layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkEnumerateInstanceLayerProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceLayerProperties.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t * pPropertyCount, + LayerProperties * pProperties, + Dispatch const & d ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumerateInstanceLayerProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceLayerProperties.html + template <typename LayerPropertiesAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename LayerPropertiesAllocator::value_type, LayerProperties>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type + enumerateInstanceLayerProperties( Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateInstanceLayerProperties && "Function <vkEnumerateInstanceLayerProperties> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<LayerProperties, LayerPropertiesAllocator> properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkEnumerateInstanceLayerProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceLayerProperties.html + template <typename LayerPropertiesAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename LayerPropertiesAllocator::value_type, LayerProperties>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type + enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateInstanceLayerProperties && "Function <vkEnumerateInstanceLayerProperties> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkEnumerateDeviceLayerProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateDeviceLayerProperties.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t * pPropertyCount, + LayerProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumerateDeviceLayerProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateDeviceLayerProperties.html + template <typename LayerPropertiesAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename LayerPropertiesAllocator::value_type, LayerProperties>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type + PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateDeviceLayerProperties && "Function <vkEnumerateDeviceLayerProperties> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<LayerProperties, LayerPropertiesAllocator> properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( + d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, 
reinterpret_cast<VkLayerProperties *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkEnumerateDeviceLayerProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateDeviceLayerProperties.html + template <typename LayerPropertiesAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename LayerPropertiesAllocator::value_type, LayerProperties>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type + PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateDeviceLayerProperties && "Function <vkEnumerateDeviceLayerProperties> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( + d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceQueue, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceQueue.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Queue * pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceQueue( static_cast<VkDevice>( m_device ), queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceQueue, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceQueue.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceQueue && "Function <vkGetDeviceQueue> requires <VK_VERSION_1_0>" ); +# endif + + Queue queue; + d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) ); + + return queue; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkQueueSubmit, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Queue::submit( uint32_t submitCount, const SubmitInfo * pSubmits, Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkQueueSubmit( static_cast<VkQueue>( m_queue ), submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueSubmit, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Queue::submit( ArrayProxy<const SubmitInfo> const & submits, Fence fence, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueSubmit && "Function <vkQueueSubmit> requires <VK_VERSION_1_0>" ); +# endif + + Result result = static_cast<Result>( + d.vkQueueSubmit( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo *>( submits.data() ), static_cast<VkFence>( fence ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueWaitIdle, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueWaitIdle.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkQueueWaitIdle( static_cast<VkQueue>( m_queue ) ) ); + } +#else + // wrapper function for command vkQueueWaitIdle, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueWaitIdle.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::waitIdle( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueWaitIdle && "Function <vkQueueWaitIdle> requires <VK_VERSION_1_0>" ); +# endif + + Result result = static_cast<Result>( d.vkQueueWaitIdle( m_queue ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); + + return detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDeviceWaitIdle, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDeviceWaitIdle.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkDeviceWaitIdle( static_cast<VkDevice>( m_device ) ) ); + } +#else + // wrapper function for command vkDeviceWaitIdle, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDeviceWaitIdle.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDeviceWaitIdle && "Function <vkDeviceWaitIdle> requires <VK_VERSION_1_0>" ); +# endif + + Result result = static_cast<Result>( d.vkDeviceWaitIdle( m_device ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); + + return detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkAllocateMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateMemory.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateMemory( const MemoryAllocateInfo * pAllocateInfo, + const AllocationCallbacks * pAllocator, + DeviceMemory * pMemory, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkAllocateMemory( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkDeviceMemory *>( pMemory ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAllocateMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateMemory.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DeviceMemory>::type + Device::allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateMemory && "Function <vkAllocateMemory> requires <VK_VERSION_1_0>" ); +# endif + + DeviceMemory memory; + Result result = static_cast<Result>( d.vkAllocateMemory( m_device, + reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkDeviceMemory *>( &memory ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" ); + + return detail::createResultValueType( result, std::move( memory ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command 
vkAllocateMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateMemory.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DeviceMemory, Dispatch>>::type + Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateMemory && "Function <vkAllocateMemory> requires <VK_VERSION_1_0>" ); +# endif + + DeviceMemory memory; + Result result = static_cast<Result>( d.vkAllocateMemory( m_device, + reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkDeviceMemory *>( &memory ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" ); + + return detail::createResultValueType( result, UniqueHandle<DeviceMemory, Dispatch>( memory, detail::ObjectFree<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkFreeMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeMemory.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::freeMemory( DeviceMemory memory, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkFreeMemory( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkFreeMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeMemory.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::freeMemory( DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkFreeMemory && "Function <vkFreeMemory> requires <VK_VERSION_1_0>" ); +# endif + + d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkFreeMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeMemory.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void( Device::free )( DeviceMemory memory, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkFreeMemory( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkFreeMemory, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeMemory.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void( Device::free )( DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkFreeMemory && "Function <vkFreeMemory> requires <VK_VERSION_1_0>" ); +# endif + + d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkMapMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( + DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags, void ** ppData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkMapMemory( static_cast<VkDevice>( m_device ), + static_cast<VkDeviceMemory>( memory ), + static_cast<VkDeviceSize>( offset ), + static_cast<VkDeviceSize>( size ), + static_cast<VkMemoryMapFlags>( flags ), + ppData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkMapMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type + Device::mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkMapMemory && "Function <vkMapMemory> requires <VK_VERSION_1_0>" ); +# endif + + void * pData; + Result result = static_cast<Result>( d.vkMapMemory( m_device, + static_cast<VkDeviceMemory>( memory ), + static_cast<VkDeviceSize>( offset ), + static_cast<VkDeviceSize>( size ), + static_cast<VkMemoryMapFlags>( flags ), + &pData ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" ); + + return detail::createResultValueType( result, std::move( pData ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUnmapMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnmapMemory.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::unmapMemory( DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUnmapMemory( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ) ); + } + + // wrapper function for command vkFlushMappedMemoryRanges, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFlushMappedMemoryRanges.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, + const MappedMemoryRange * pMemoryRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkFlushMappedMemoryRanges( static_cast<VkDevice>( m_device ), memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkFlushMappedMemoryRanges, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFlushMappedMemoryRanges.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::flushMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> const & memoryRanges, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkFlushMappedMemoryRanges && "Function <vkFlushMappedMemoryRanges> requires <VK_VERSION_1_0>" ); +# endif + + Result result = + static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkInvalidateMappedMemoryRanges, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkInvalidateMappedMemoryRanges.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, + const MappedMemoryRange * pMemoryRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkInvalidateMappedMemoryRanges( static_cast<VkDevice>( m_device ), memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkInvalidateMappedMemoryRanges, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkInvalidateMappedMemoryRanges.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::invalidateMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> const & memoryRanges, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkInvalidateMappedMemoryRanges && "Function <vkInvalidateMappedMemoryRanges> requires <VK_VERSION_1_0>" ); +# endif + + Result result = static_cast<Result>( + d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command 
vkGetDeviceMemoryCommitment, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryCommitment.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getMemoryCommitment( DeviceMemory memory, DeviceSize * pCommittedMemoryInBytes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceMemoryCommitment( + static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceMemoryCommitment, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryCommitment.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DeviceSize Device::getMemoryCommitment( DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceMemoryCommitment && "Function <vkGetDeviceMemoryCommitment> requires <VK_VERSION_1_0>" ); +# endif + + DeviceSize committedMemoryInBytes; + d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) ); + + return committedMemoryInBytes; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindBufferMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkBindBufferMemory( + static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) ); + } +#else + // wrapper function for command vkBindBufferMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindBufferMemory && "Function <vkBindBufferMemory> requires <VK_VERSION_1_0>" ); +# endif + + Result result = static_cast<Result>( + d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" ); + + return detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for 
command vkBindImageMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkBindImageMemory( + static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) ); + } +#else + // wrapper function for command vkBindImageMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindImageMemory && "Function <vkBindImageMemory> requires <VK_VERSION_1_0>" ); +# endif + + Result result = static_cast<Result>( + d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" ); + + return detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkGetBufferMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::getBufferMemoryRequirements( Buffer buffer, MemoryRequirements * pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetBufferMemoryRequirements( + static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE MemoryRequirements Device::getBufferMemoryRequirements( Buffer buffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements && "Function <vkGetBufferMemoryRequirements> requires <VK_VERSION_1_0>" ); +# endif + + MemoryRequirements memoryRequirements; + d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) ); + + return memoryRequirements; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageMemoryRequirements, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::getImageMemoryRequirements( Image image, MemoryRequirements * pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageMemoryRequirements( + static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageMemoryRequirements, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE MemoryRequirements Device::getImageMemoryRequirements( Image image, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements && "Function <vkGetImageMemoryRequirements> requires <VK_VERSION_1_0>" ); +# endif + + MemoryRequirements memoryRequirements; + d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) ); + + return memoryRequirements; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageSparseMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( Image image, + uint32_t * pSparseMemoryRequirementCount, + SparseImageMemoryRequirements * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSparseMemoryRequirements( static_cast<VkDevice>( m_device ), + static_cast<VkImage>( image ), + pSparseMemoryRequirementCount, + reinterpret_cast<VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageSparseMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements.html + template <typename SparseImageMemoryRequirementsAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename SparseImageMemoryRequirementsAllocator::value_type, SparseImageMemoryRequirements>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> + Device::getImageSparseMemoryRequirements( Image image, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements && "Function <vkGetImageSparseMemoryRequirements> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> 
sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements( m_device, + static_cast<VkImage>( image ), + &sparseMemoryRequirementCount, + reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + // wrapper function for command vkGetImageSparseMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements.html + template <typename SparseImageMemoryRequirementsAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename SparseImageMemoryRequirementsAllocator::value_type, SparseImageMemoryRequirements>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> + Device::getImageSparseMemoryRequirements( Image image, + SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements && "Function <vkGetImageSparseMemoryRequirements> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements( sparseImageMemoryRequirementsAllocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements( m_device, + static_cast<VkImage>( image ), + &sparseMemoryRequirementCount, + reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( Format format, + ImageType type, + SampleCountFlagBits samples, + ImageUsageFlags usage, + ImageTiling tiling, + uint32_t * pPropertyCount, + SparseImageFormatProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceSparseImageFormatProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), + static_cast<VkFormat>( format ), + 
static_cast<VkImageType>( type ), + static_cast<VkSampleCountFlagBits>( samples ), + static_cast<VkImageUsageFlags>( usage ), + static_cast<VkImageTiling>( tiling ), + pPropertyCount, + reinterpret_cast<VkSparseImageFormatProperties *>( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties.html + template <typename SparseImageFormatPropertiesAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename SparseImageFormatPropertiesAllocator::value_type, SparseImageFormatProperties>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> + PhysicalDevice::getSparseImageFormatProperties( + Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties && + "Function <vkGetPhysicalDeviceSparseImageFormatProperties> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, + static_cast<VkFormat>( format ), + static_cast<VkImageType>( type ), + static_cast<VkSampleCountFlagBits>( samples ), + static_cast<VkImageUsageFlags>( usage ), + static_cast<VkImageTiling>( tiling ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, + static_cast<VkFormat>( format ), + static_cast<VkImageType>( type ), + static_cast<VkSampleCountFlagBits>( samples ), + static_cast<VkImageUsageFlags>( usage ), + static_cast<VkImageTiling>( tiling ), + &propertyCount, + reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) ); + + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties.html + template <typename SparseImageFormatPropertiesAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename SparseImageFormatPropertiesAllocator::value_type, SparseImageFormatProperties>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> + PhysicalDevice::getSparseImageFormatProperties( Format format, + ImageType type, + SampleCountFlagBits samples, + ImageUsageFlags usage, + ImageTiling tiling, + SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties && + "Function 
<vkGetPhysicalDeviceSparseImageFormatProperties> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties( sparseImageFormatPropertiesAllocator ); + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, + static_cast<VkFormat>( format ), + static_cast<VkImageType>( type ), + static_cast<VkSampleCountFlagBits>( samples ), + static_cast<VkImageUsageFlags>( usage ), + static_cast<VkImageTiling>( tiling ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, + static_cast<VkFormat>( format ), + static_cast<VkImageType>( type ), + static_cast<VkSampleCountFlagBits>( samples ), + static_cast<VkImageUsageFlags>( usage ), + static_cast<VkImageTiling>( tiling ), + &propertyCount, + reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) ); + + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkQueueBindSparse, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueBindSparse.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Queue::bindSparse( uint32_t bindInfoCount, const BindSparseInfo * pBindInfo, Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkQueueBindSparse( + static_cast<VkQueue>( m_queue ), bindInfoCount, reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ), static_cast<VkFence>( fence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueBindSparse, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueBindSparse.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Queue::bindSparse( ArrayProxy<const BindSparseInfo> const & bindInfo, Fence fence, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueBindSparse && "Function <vkQueueBindSparse> requires <VK_VERSION_1_0>" ); +# endif + + Result result = static_cast<Result>( + d.vkQueueBindSparse( m_queue, bindInfo.size(), reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ), static_cast<VkFence>( fence ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateFence.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFence( const FenceCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + Fence * pFence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return 
static_cast<Result>( d.vkCreateFence( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkFence *>( pFence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateFence.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<Fence>::type + Device::createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateFence && "Function <vkCreateFence> requires <VK_VERSION_1_0>" ); +# endif + + Fence fence; + Result result = static_cast<Result>( d.vkCreateFence( m_device, + reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkFence *>( &fence ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" ); + + return detail::createResultValueType( result, std::move( fence ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateFence.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Fence, Dispatch>>::type + Device::createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateFence && "Function <vkCreateFence> requires <VK_VERSION_1_0>" ); +# endif + + Fence fence; + Result result = static_cast<Result>( d.vkCreateFence( m_device, + reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkFence *>( &fence ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" ); + + return detail::createResultValueType( result, UniqueHandle<Fence, Dispatch>( fence, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFence.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyFence( Fence fence, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyFence( static_cast<VkDevice>( m_device ), static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFence.html + template 
<typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyFence( Fence fence, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyFence && "Function <vkDestroyFence> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFence.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( Fence fence, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyFence( static_cast<VkDevice>( m_device ), static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyFence, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFence.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( Fence fence, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyFence && "Function <vkDestroyFence> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkResetFences, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetFences.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, const Fence * pFences, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkResetFences( static_cast<VkDevice>( m_device ), fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkResetFences, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetFences.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetFences( ArrayProxy<const Fence> const & fences, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkResetFences && "Function <vkResetFences> requires <VK_VERSION_1_0>" ); +# endif + + Result result = static_cast<Result>( d.vkResetFences( m_device, fences.size(), 
reinterpret_cast<const VkFence *>( fences.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetFenceStatus, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceStatus.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetFenceStatus( static_cast<VkDevice>( m_device ), static_cast<VkFence>( fence ) ) ); + } +#else + // wrapper function for command vkGetFenceStatus, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceStatus.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( Fence fence, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetFenceStatus && "Function <vkGetFenceStatus> requires <VK_VERSION_1_0>" ); +# endif + + Result result = static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus", { Result::eSuccess, Result::eNotReady } ); + + return static_cast<Result>( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkWaitForFences, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForFences.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::waitForFences( uint32_t fenceCount, const Fence * pFences, Bool32 waitAll, uint64_t timeout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkWaitForFences( + static_cast<VkDevice>( m_device ), fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkWaitForFences, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForFences.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::waitForFences( ArrayProxy<const Fence> const & fences, Bool32 waitAll, uint64_t timeout, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWaitForFences && "Function <vkWaitForFences> requires <VK_VERSION_1_0>" ); +# endif + + Result result = static_cast<Result>( + d.vkWaitForFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", { Result::eSuccess, Result::eTimeout } ); + + return static_cast<Result>( result ); + } 
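  // Illustrative usage sketch (editorial addition, not generated from the Vulkan registry): a
  // minimal example of driving the fence wrappers defined above in enhanced mode with
  // exceptions enabled, assuming the default `vk` namespace alias and hypothetical,
  // already-initialized `device`, `queue`, and `submitInfo` objects.
  //
  //   vk::UniqueFence fence = device.createFenceUnique( vk::FenceCreateInfo{} );
  //   queue.submit( submitInfo, *fence );                                    // fence is signaled when the submission completes
  //   vk::Result r = device.waitForFences( *fence, VK_TRUE, UINT64_MAX );    // returns eSuccess or eTimeout
  //   device.resetFences( *fence );                                          // unsignal the fence before reusing it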
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateSemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSemaphore.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSemaphore( const SemaphoreCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + Semaphore * pSemaphore, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateSemaphore( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkSemaphore *>( pSemaphore ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateSemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSemaphore.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<Semaphore>::type + Device::createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSemaphore && "Function <vkCreateSemaphore> requires <VK_VERSION_1_0>" ); +# endif + + Semaphore semaphore; + Result result = static_cast<Result>( d.vkCreateSemaphore( m_device, + reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSemaphore *>( &semaphore ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" ); + + return detail::createResultValueType( result, std::move( semaphore ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateSemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSemaphore.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Semaphore, Dispatch>>::type + Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSemaphore && "Function <vkCreateSemaphore> requires <VK_VERSION_1_0>" ); +# endif + + Semaphore semaphore; + Result result = static_cast<Result>( d.vkCreateSemaphore( m_device, + reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSemaphore *>( &semaphore ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<Semaphore, Dispatch>( semaphore, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySemaphore, 
see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySemaphore.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroySemaphore( Semaphore semaphore, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySemaphore( + static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySemaphore.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroySemaphore( Semaphore semaphore, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySemaphore && "Function <vkDestroySemaphore> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySemaphore.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( Semaphore semaphore, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySemaphore( + static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySemaphore.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( Semaphore semaphore, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySemaphore && "Function <vkDestroySemaphore> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateQueryPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createQueryPool( const QueryPoolCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + QueryPool * pQueryPool, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateQueryPool( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkQueryPool *>( pQueryPool ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateQueryPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<QueryPool>::type + Device::createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateQueryPool && "Function <vkCreateQueryPool> requires <VK_VERSION_1_0>" ); +# endif + + QueryPool queryPool; + Result result = static_cast<Result>( d.vkCreateQueryPool( m_device, + reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkQueryPool *>( &queryPool ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" ); + + return detail::createResultValueType( result, std::move( queryPool ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateQueryPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<QueryPool, Dispatch>>::type + Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateQueryPool && "Function <vkCreateQueryPool> requires <VK_VERSION_1_0>" ); +# endif + + QueryPool queryPool; + Result result = static_cast<Result>( d.vkCreateQueryPool( m_device, + reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkQueryPool *>( &queryPool ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<QueryPool, Dispatch>( queryPool, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyQueryPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyQueryPool( QueryPool queryPool, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyQueryPool( + static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( 
pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyQueryPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyQueryPool( QueryPool queryPool, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyQueryPool && "Function <vkDestroyQueryPool> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyQueryPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( QueryPool queryPool, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyQueryPool( + static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyQueryPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( QueryPool queryPool, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyQueryPool && "Function <vkDestroyQueryPool> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetQueryPoolResults, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueryPoolResults.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void * pData, + DeviceSize stride, + QueryResultFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetQueryPoolResults( static_cast<VkDevice>( m_device ), + static_cast<VkQueryPool>( queryPool ), + firstQuery, + queryCount, + dataSize, + pData, + static_cast<VkDeviceSize>( stride ), + static_cast<VkQueryResultFlags>( flags ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetQueryPoolResults, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueryPoolResults.html + template <typename DataType, + typename 
DataTypeAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DataTypeAllocator::value_type, DataType>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<DataType, DataTypeAllocator>> Device::getQueryPoolResults( + QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, DeviceSize stride, QueryResultFlags flags, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetQueryPoolResults && "Function <vkGetQueryPoolResults> requires <VK_VERSION_1_0>" ); +# endif + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); + Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device, + static_cast<VkQueryPool>( queryPool ), + firstQuery, + queryCount, + data.size() * sizeof( DataType ), + reinterpret_cast<void *>( data.data() ), + static_cast<VkDeviceSize>( stride ), + static_cast<VkQueryResultFlags>( flags ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } ); + + return ResultValue<std::vector<DataType, DataTypeAllocator>>( result, std::move( data ) ); + } + + // wrapper function for command vkGetQueryPoolResults, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueryPoolResults.html + template <typename DataType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<DataType> Device::getQueryPoolResult( + QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, DeviceSize stride, QueryResultFlags flags, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetQueryPoolResults && "Function <vkGetQueryPoolResults> requires <VK_VERSION_1_0>" ); +# endif + + DataType data; + Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device, + static_cast<VkQueryPool>( queryPool ), + firstQuery, + queryCount, + sizeof( DataType ), + reinterpret_cast<void *>( &data ), + static_cast<VkDeviceSize>( stride ), + static_cast<VkQueryResultFlags>( flags ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult", { Result::eSuccess, Result::eNotReady } ); + + return ResultValue<DataType>( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBuffer( const BufferCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + Buffer * pBuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateBuffer( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkBuffer *>( pBuffer ) ) ); + } + +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<Buffer>::type + Device::createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateBuffer && "Function <vkCreateBuffer> requires <VK_VERSION_1_0>" ); +# endif + + Buffer buffer; + Result result = static_cast<Result>( d.vkCreateBuffer( m_device, + reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkBuffer *>( &buffer ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" ); + + return detail::createResultValueType( result, std::move( buffer ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Buffer, Dispatch>>::type + Device::createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateBuffer && "Function <vkCreateBuffer> requires <VK_VERSION_1_0>" ); +# endif + + Buffer buffer; + Result result = static_cast<Result>( d.vkCreateBuffer( m_device, + reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkBuffer *>( &buffer ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" ); + + return detail::createResultValueType( result, UniqueHandle<Buffer, Dispatch>( buffer, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyBuffer( Buffer buffer, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBuffer( static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyBuffer( Buffer buffer, Optional<const AllocationCallbacks> allocator, Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyBuffer && "Function <vkDestroyBuffer> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( Buffer buffer, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBuffer( static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( Buffer buffer, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyBuffer && "Function <vkDestroyBuffer> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImage( const ImageCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + Image * pImage, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateImage( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkImageCreateInfo *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkImage *>( pImage ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<Image>::type + Device::createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateImage && "Function <vkCreateImage> requires <VK_VERSION_1_0>" ); +# endif + + Image image; + Result result = static_cast<Result>( d.vkCreateImage( m_device, + reinterpret_cast<const 
VkImageCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkImage *>( &image ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" ); + + return detail::createResultValueType( result, std::move( image ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Image, Dispatch>>::type + Device::createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateImage && "Function <vkCreateImage> requires <VK_VERSION_1_0>" ); +# endif + + Image image; + Result result = static_cast<Result>( d.vkCreateImage( m_device, + reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkImage *>( &image ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" ); + + return detail::createResultValueType( result, UniqueHandle<Image, Dispatch>( image, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyImage( Image image, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyImage( static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyImage( Image image, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyImage && "Function <vkDestroyImage> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( Image image, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyImage( static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( Image image, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyImage && "Function <vkDestroyImage> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageSubresourceLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( Image image, + const ImageSubresource * pSubresource, + SubresourceLayout * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSubresourceLayout( static_cast<VkDevice>( m_device ), + static_cast<VkImage>( image ), + reinterpret_cast<const VkImageSubresource *>( pSubresource ), + reinterpret_cast<VkSubresourceLayout *>( pLayout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageSubresourceLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE SubresourceLayout Device::getImageSubresourceLayout( Image image, + const ImageSubresource & subresource, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageSubresourceLayout && "Function <vkGetImageSubresourceLayout> requires <VK_VERSION_1_0>" ); +# endif + + SubresourceLayout layout; + d.vkGetImageSubresourceLayout( m_device, + static_cast<VkImage>( image ), + reinterpret_cast<const VkImageSubresource *>( &subresource ), + reinterpret_cast<VkSubresourceLayout *>( &layout ) ); + + return layout; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImageView.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImageView( const ImageViewCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + ImageView * pView, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateImageView( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkImageViewCreateInfo 
*>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkImageView *>( pView ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImageView.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<ImageView>::type + Device::createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateImageView && "Function <vkCreateImageView> requires <VK_VERSION_1_0>" ); +# endif + + ImageView view; + Result result = static_cast<Result>( d.vkCreateImageView( m_device, + reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkImageView *>( &view ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" ); + + return detail::createResultValueType( result, std::move( view ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImageView.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ImageView, Dispatch>>::type + Device::createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateImageView && "Function <vkCreateImageView> requires <VK_VERSION_1_0>" ); +# endif + + ImageView view; + Result result = static_cast<Result>( d.vkCreateImageView( m_device, + reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkImageView *>( &view ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" ); + + return detail::createResultValueType( result, UniqueHandle<ImageView, Dispatch>( view, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImageView.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyImageView( ImageView imageView, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyImageView( + static_cast<VkDevice>( m_device ), static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyImageView, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImageView.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyImageView( ImageView imageView, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyImageView && "Function <vkDestroyImageView> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImageView.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( ImageView imageView, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyImageView( + static_cast<VkDevice>( m_device ), static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyImageView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImageView.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( ImageView imageView, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyImageView && "Function <vkDestroyImageView> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCommandPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCommandPool( const CommandPoolCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + CommandPool * pCommandPool, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateCommandPool( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkCommandPoolCreateInfo *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkCommandPool *>( pCommandPool ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCommandPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<CommandPool>::type + 
Device::createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCommandPool && "Function <vkCreateCommandPool> requires <VK_VERSION_1_0>" ); +# endif + + CommandPool commandPool; + Result result = static_cast<Result>( d.vkCreateCommandPool( m_device, + reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkCommandPool *>( &commandPool ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" ); + + return detail::createResultValueType( result, std::move( commandPool ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCommandPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<CommandPool, Dispatch>>::type + Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCommandPool && "Function <vkCreateCommandPool> requires <VK_VERSION_1_0>" ); +# endif + + CommandPool commandPool; + Result result = static_cast<Result>( d.vkCreateCommandPool( m_device, + reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkCommandPool *>( &commandPool ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<CommandPool, Dispatch>( commandPool, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCommandPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyCommandPool( CommandPool commandPool, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCommandPool( + static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCommandPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyCommandPool( CommandPool commandPool, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCommandPool && "Function <vkDestroyCommandPool> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCommandPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( CommandPool commandPool, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCommandPool( + static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCommandPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( CommandPool commandPool, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCommandPool && "Function <vkDestroyCommandPool> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkResetCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetCommandPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetCommandPool( CommandPool commandPool, + CommandPoolResetFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkResetCommandPool( static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) ); + } +#else + // wrapper function for command vkResetCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetCommandPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkResetCommandPool && "Function <vkResetCommandPool> requires <VK_VERSION_1_0>" ); +# endif + + Result result = + static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) ); + detail::resultCheck( 
result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" ); + + return detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkAllocateCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateCommandBuffers.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const CommandBufferAllocateInfo * pAllocateInfo, + CommandBuffer * pCommandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkAllocateCommandBuffers( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ), + reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAllocateCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateCommandBuffers.html + template <typename CommandBufferAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename CommandBufferAllocator::value_type, CommandBuffer>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type + Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function <vkAllocateCommandBuffers> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount ); + Result result = static_cast<Result>( d.vkAllocateCommandBuffers( + m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); + + return detail::createResultValueType( result, std::move( commandBuffers ) ); + } + + // wrapper function for command vkAllocateCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateCommandBuffers.html + template <typename CommandBufferAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename CommandBufferAllocator::value_type, CommandBuffer>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type + Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, CommandBufferAllocator & commandBufferAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function <vkAllocateCommandBuffers> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount, commandBufferAllocator ); + Result result = static_cast<Result>( d.vkAllocateCommandBuffers( + 
m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); + + return detail::createResultValueType( result, std::move( commandBuffers ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkAllocateCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateCommandBuffers.html + template <typename Dispatch, + typename CommandBufferAllocator, + typename std::enable_if<std::is_same<typename CommandBufferAllocator::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type + Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function <vkAllocateCommandBuffers> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<CommandBuffer> commandBuffers( allocateInfo.commandBufferCount ); + Result result = static_cast<Result>( d.vkAllocateCommandBuffers( + m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); + std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers; + uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); + detail::PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d ); + for ( auto const & commandBuffer : commandBuffers ) + { + uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) ); + } + return detail::createResultValueType( result, std::move( uniqueCommandBuffers ) ); + } + + // wrapper function for command vkAllocateCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateCommandBuffers.html + template <typename Dispatch, + typename CommandBufferAllocator, + typename std::enable_if<std::is_same<typename CommandBufferAllocator::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type + Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, + CommandBufferAllocator & commandBufferAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function <vkAllocateCommandBuffers> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<CommandBuffer> commandBuffers( allocateInfo.commandBufferCount ); + Result result = static_cast<Result>( d.vkAllocateCommandBuffers( + m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) ); + 
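+    // Editorial note (not generated): this overload, like the one above, follows a raw-allocate-then-wrap pattern.
+    // All command buffers come from a single vkAllocateCommandBuffers call; the loop below wraps each handle in a
+    // UniqueHandle whose PoolFree deleter returns it to allocateInfo.commandPool on destruction.
+    // Illustrative call site (an assumption, not part of this header), given the usual `vk` namespace alias and an
+    // existing device and command pool:
+    //   auto cmdBuffers = device.allocateCommandBuffersUnique( vk::CommandBufferAllocateInfo( pool, vk::CommandBufferLevel::ePrimary, 2 ) );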
detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); + std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator ); + uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); + detail::PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d ); + for ( auto const & commandBuffer : commandBuffers ) + { + uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) ); + } + return detail::createResultValueType( result, std::move( uniqueCommandBuffers ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkFreeCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeCommandBuffers.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::freeCommandBuffers( CommandPool commandPool, + uint32_t commandBufferCount, + const CommandBuffer * pCommandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkFreeCommandBuffers( static_cast<VkDevice>( m_device ), + static_cast<VkCommandPool>( commandPool ), + commandBufferCount, + reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkFreeCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeCommandBuffers.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::freeCommandBuffers( CommandPool commandPool, ArrayProxy<const CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkFreeCommandBuffers && "Function <vkFreeCommandBuffers> requires <VK_VERSION_1_0>" ); +# endif + + d.vkFreeCommandBuffers( + m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkFreeCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeCommandBuffers.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void( Device::free )( CommandPool commandPool, + uint32_t commandBufferCount, + const CommandBuffer * pCommandBuffers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkFreeCommandBuffers( static_cast<VkDevice>( m_device ), + static_cast<VkCommandPool>( commandPool ), + commandBufferCount, + reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkFreeCommandBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeCommandBuffers.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void( Device::free )( CommandPool commandPool, + ArrayProxy<const CommandBuffer> const & commandBuffers, + 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkFreeCommandBuffers && "Function <vkFreeCommandBuffers> requires <VK_VERSION_1_0>" ); +# endif + + d.vkFreeCommandBuffers( + m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBeginCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBeginCommandBuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( const CommandBufferBeginInfo * pBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkBeginCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBeginCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBeginCommandBuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBeginCommandBuffer && "Function <vkBeginCommandBuffer> requires <VK_VERSION_1_0>" ); +# endif + + Result result = static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEndCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEndCommandBuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkEndCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ) ) ); + } +#else + // wrapper function for command vkEndCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEndCommandBuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEndCommandBuffer && "Function <vkEndCommandBuffer> requires <VK_VERSION_1_0>" ); +# endif + + Result result = static_cast<Result>( d.vkEndCommandBuffer( 
m_commandBuffer ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" ); + + return detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkResetCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetCommandBuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset( CommandBufferResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkResetCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCommandBufferResetFlags>( flags ) ) ); + } +#else + // wrapper function for command vkResetCommandBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetCommandBuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::reset( CommandBufferResetFlags flags, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkResetCommandBuffer && "Function <vkResetCommandBuffer> requires <VK_VERSION_1_0>" ); +# endif + + Result result = static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" ); + + return detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkCmdCopyBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( + Buffer srcBuffer, Buffer dstBuffer, uint32_t regionCount, const BufferCopy * pRegions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkBuffer>( srcBuffer ), + static_cast<VkBuffer>( dstBuffer ), + regionCount, + reinterpret_cast<const VkBufferCopy *>( pRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::copyBuffer( Buffer srcBuffer, Buffer dstBuffer, ArrayProxy<const BufferCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyBuffer && "Function <vkCmdCopyBuffer> requires <VK_VERSION_1_0>" ); +# endif + + d.vkCmdCopyBuffer( m_commandBuffer, + static_cast<VkBuffer>( srcBuffer ), + static_cast<VkBuffer>( dstBuffer ), + regions.size(), + reinterpret_cast<const VkBufferCopy *>( regions.data() ) ); + } +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyImage( Image srcImage, + ImageLayout srcImageLayout, + Image dstImage, + ImageLayout dstImageLayout, + uint32_t regionCount, + const ImageCopy * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImage( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkImage>( srcImage ), + static_cast<VkImageLayout>( srcImageLayout ), + static_cast<VkImage>( dstImage ), + static_cast<VkImageLayout>( dstImageLayout ), + regionCount, + reinterpret_cast<const VkImageCopy *>( pRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyImage( Image srcImage, + ImageLayout srcImageLayout, + Image dstImage, + ImageLayout dstImageLayout, + ArrayProxy<const ImageCopy> const & regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyImage && "Function <vkCmdCopyImage> requires <VK_VERSION_1_0>" ); +# endif + + d.vkCmdCopyImage( m_commandBuffer, + static_cast<VkImage>( srcImage ), + static_cast<VkImageLayout>( srcImageLayout ), + static_cast<VkImage>( dstImage ), + static_cast<VkImageLayout>( dstImageLayout ), + regions.size(), + reinterpret_cast<const VkImageCopy *>( regions.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyBufferToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( Buffer srcBuffer, + Image dstImage, + ImageLayout dstImageLayout, + uint32_t regionCount, + const BufferImageCopy * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyBufferToImage( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkBuffer>( srcBuffer ), + static_cast<VkImage>( dstImage ), + static_cast<VkImageLayout>( dstImageLayout ), + regionCount, + reinterpret_cast<const VkBufferImageCopy *>( pRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyBufferToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( Buffer srcBuffer, + Image dstImage, + ImageLayout dstImageLayout, + ArrayProxy<const BufferImageCopy> const & regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyBufferToImage && 
"Function <vkCmdCopyBufferToImage> requires <VK_VERSION_1_0>" ); +# endif + + d.vkCmdCopyBufferToImage( m_commandBuffer, + static_cast<VkBuffer>( srcBuffer ), + static_cast<VkImage>( dstImage ), + static_cast<VkImageLayout>( dstImageLayout ), + regions.size(), + reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyImageToBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( Image srcImage, + ImageLayout srcImageLayout, + Buffer dstBuffer, + uint32_t regionCount, + const BufferImageCopy * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImageToBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkImage>( srcImage ), + static_cast<VkImageLayout>( srcImageLayout ), + static_cast<VkBuffer>( dstBuffer ), + regionCount, + reinterpret_cast<const VkBufferImageCopy *>( pRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyImageToBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( Image srcImage, + ImageLayout srcImageLayout, + Buffer dstBuffer, + ArrayProxy<const BufferImageCopy> const & regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyImageToBuffer && "Function <vkCmdCopyImageToBuffer> requires <VK_VERSION_1_0>" ); +# endif + + d.vkCmdCopyImageToBuffer( m_commandBuffer, + static_cast<VkImage>( srcImage ), + static_cast<VkImageLayout>( srcImageLayout ), + static_cast<VkBuffer>( dstBuffer ), + regions.size(), + reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdUpdateBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdUpdateBuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize dataSize, const void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdUpdateBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkBuffer>( dstBuffer ), + static_cast<VkDeviceSize>( dstOffset ), + static_cast<VkDeviceSize>( dataSize ), + pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdUpdateBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdUpdateBuffer.html + template <typename DataType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, ArrayProxy<const DataType> const & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdUpdateBuffer && "Function <vkCmdUpdateBuffer> requires <VK_VERSION_1_0>" ); +# endif + + d.vkCmdUpdateBuffer( m_commandBuffer, + static_cast<VkBuffer>( dstBuffer ), + static_cast<VkDeviceSize>( dstOffset ), + data.size() * sizeof( DataType ), + reinterpret_cast<const void *>( data.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdFillBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdFillBuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdFillBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkBuffer>( dstBuffer ), + static_cast<VkDeviceSize>( dstOffset ), + static_cast<VkDeviceSize>( size ), + data ); + } + + // wrapper function for command vkCmdPipelineBarrier, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( PipelineStageFlags srcStageMask, + PipelineStageFlags dstStageMask, + DependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const MemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPipelineBarrier( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkPipelineStageFlags>( srcStageMask ), + static_cast<VkPipelineStageFlags>( dstStageMask ), + static_cast<VkDependencyFlags>( dependencyFlags ), + memoryBarrierCount, + reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ), + bufferMemoryBarrierCount, + reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ), + imageMemoryBarrierCount, + reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPipelineBarrier, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( PipelineStageFlags srcStageMask, + PipelineStageFlags dstStageMask, + DependencyFlags dependencyFlags, + ArrayProxy<const MemoryBarrier> const & memoryBarriers, + ArrayProxy<const BufferMemoryBarrier> const & bufferMemoryBarriers, + ArrayProxy<const ImageMemoryBarrier> const & imageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPipelineBarrier && "Function <vkCmdPipelineBarrier> requires <VK_VERSION_1_0>" ); +# endif + + d.vkCmdPipelineBarrier( m_commandBuffer, + static_cast<VkPipelineStageFlags>( srcStageMask ), + 
static_cast<VkPipelineStageFlags>( dstStageMask ), + static_cast<VkDependencyFlags>( dependencyFlags ), + memoryBarriers.size(), + reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), + bufferMemoryBarriers.size(), + reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), + imageMemoryBarriers.size(), + reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBeginQuery, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginQuery.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginQuery( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) ); + } + + // wrapper function for command vkCmdEndQuery, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndQuery.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::endQuery( QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndQuery( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query ); + } + + // wrapper function for command vkCmdResetQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResetQueryPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResetQueryPool( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount ); + } + + // wrapper function for command vkCmdWriteTimestamp, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteTimestamp.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteTimestamp( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query ); + } + + // wrapper function for command vkCmdCopyQueryPoolResults, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyQueryPoolResults.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Buffer dstBuffer, + DeviceSize dstOffset, + DeviceSize stride, + QueryResultFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 
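+  // Editorial note (not generated): unlike Device::getQueryPoolResults above, vkCmdCopyQueryPoolResults records a
+  // device-side copy of the query results into dstBuffer, so no host mapping or host-side readback is involved;
+  // result availability and width are still controlled by the QueryResultFlags passed in `flags`.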
+ { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyQueryPoolResults( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkQueryPool>( queryPool ), + firstQuery, + queryCount, + static_cast<VkBuffer>( dstBuffer ), + static_cast<VkDeviceSize>( dstOffset ), + static_cast<VkDeviceSize>( stride ), + static_cast<VkQueryResultFlags>( flags ) ); + } + + // wrapper function for command vkCmdExecuteCommands, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteCommands.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::executeCommands( uint32_t commandBufferCount, const CommandBuffer * pCommandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdExecuteCommands( static_cast<VkCommandBuffer>( m_commandBuffer ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdExecuteCommands, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteCommands.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy<const CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdExecuteCommands && "Function <vkCmdExecuteCommands> requires <VK_VERSION_1_0>" ); +# endif + + d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateEvent.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createEvent( const EventCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + Event * pEvent, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateEvent( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkEvent *>( pEvent ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateEvent.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<Event>::type + Device::createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateEvent && "Function <vkCreateEvent> requires <VK_VERSION_1_0>" ); +# endif + + Event event; + Result result = static_cast<Result>( d.vkCreateEvent( m_device, + 
reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkEvent *>( &event ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" ); + + return detail::createResultValueType( result, std::move( event ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateEvent.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Event, Dispatch>>::type + Device::createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateEvent && "Function <vkCreateEvent> requires <VK_VERSION_1_0>" ); +# endif + + Event event; + Result result = static_cast<Result>( d.vkCreateEvent( m_device, + reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkEvent *>( &event ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique" ); + + return detail::createResultValueType( result, UniqueHandle<Event, Dispatch>( event, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyEvent.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyEvent( Event event, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyEvent.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyEvent( Event event, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyEvent && "Function <vkDestroyEvent> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyEvent.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( Event event, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyEvent.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( Event event, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyEvent && "Function <vkDestroyEvent> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetEventStatus, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEventStatus.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetEventStatus( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( event ) ) ); + } +#else + // wrapper function for command vkGetEventStatus, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEventStatus.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( Event event, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetEventStatus && "Function <vkGetEventStatus> requires <VK_VERSION_1_0>" ); +# endif + + Result result = static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus", { Result::eEventSet, Result::eEventReset } ); + + return static_cast<Result>( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetEvent.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkSetEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( event ) ) ); + } +#else + // wrapper function for command vkSetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetEvent.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type 
Device::setEvent( Event event, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetEvent && "Function <vkSetEvent> requires <VK_VERSION_1_0>" ); +# endif + + Result result = static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" ); + + return detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkResetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetEvent.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkResetEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( event ) ) ); + } +#else + // wrapper function for command vkResetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetEvent.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( Event event, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkResetEvent && "Function <vkResetEvent> requires <VK_VERSION_1_0>" ); +# endif + + Result result = static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" ); + + return detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkCreateBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBufferView.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBufferView( const BufferViewCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + BufferView * pView, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateBufferView( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkBufferView *>( pView ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBufferView.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<BufferView>::type + Device::createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + 
VULKAN_HPP_ASSERT( d.vkCreateBufferView && "Function <vkCreateBufferView> requires <VK_VERSION_1_0>" ); +# endif + + BufferView view; + Result result = static_cast<Result>( d.vkCreateBufferView( m_device, + reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkBufferView *>( &view ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" ); + + return detail::createResultValueType( result, std::move( view ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBufferView.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<BufferView, Dispatch>>::type + Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateBufferView && "Function <vkCreateBufferView> requires <VK_VERSION_1_0>" ); +# endif + + BufferView view; + Result result = static_cast<Result>( d.vkCreateBufferView( m_device, + reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkBufferView *>( &view ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" ); + + return detail::createResultValueType( result, UniqueHandle<BufferView, Dispatch>( view, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferView.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyBufferView( BufferView bufferView, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBufferView( + static_cast<VkDevice>( m_device ), static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferView.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyBufferView( BufferView bufferView, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyBufferView && "Function <vkDestroyBufferView> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper 
function for command vkDestroyBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferView.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( BufferView bufferView, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBufferView( + static_cast<VkDevice>( m_device ), static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyBufferView, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferView.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( BufferView bufferView, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyBufferView && "Function <vkDestroyBufferView> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShaderModule.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShaderModule( const ShaderModuleCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + ShaderModule * pShaderModule, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateShaderModule( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkShaderModule *>( pShaderModule ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShaderModule.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<ShaderModule>::type + Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShaderModule && "Function <vkCreateShaderModule> requires <VK_VERSION_1_0>" ); +# endif + + ShaderModule shaderModule; + Result result = static_cast<Result>( d.vkCreateShaderModule( m_device, + reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkShaderModule *>( &shaderModule ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" 
); + + return detail::createResultValueType( result, std::move( shaderModule ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShaderModule.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ShaderModule, Dispatch>>::type + Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShaderModule && "Function <vkCreateShaderModule> requires <VK_VERSION_1_0>" ); +# endif + + ShaderModule shaderModule; + Result result = static_cast<Result>( d.vkCreateShaderModule( m_device, + reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkShaderModule *>( &shaderModule ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<ShaderModule, Dispatch>( shaderModule, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderModule.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyShaderModule( ShaderModule shaderModule, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyShaderModule( + static_cast<VkDevice>( m_device ), static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderModule.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyShaderModule( ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyShaderModule && "Function <vkDestroyShaderModule> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderModule.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( ShaderModule shaderModule, const AllocationCallbacks * pAllocator, Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyShaderModule( + static_cast<VkDevice>( m_device ), static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyShaderModule, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderModule.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyShaderModule && "Function <vkDestroyShaderModule> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreatePipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineCache.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineCache( const PipelineCacheCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + PipelineCache * pPipelineCache, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreatePipelineCache( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreatePipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineCache.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<PipelineCache>::type + Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineCache && "Function <vkCreatePipelineCache> requires <VK_VERSION_1_0>" ); +# endif + + PipelineCache pipelineCache; + Result result = static_cast<Result>( d.vkCreatePipelineCache( m_device, + reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" ); + + return detail::createResultValueType( result, std::move( pipelineCache ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreatePipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineCache.html + template <typename Dispatch, typename 
std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<PipelineCache, Dispatch>>::type + Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineCache && "Function <vkCreatePipelineCache> requires <VK_VERSION_1_0>" ); +# endif + + PipelineCache pipelineCache; + Result result = static_cast<Result>( d.vkCreatePipelineCache( m_device, + reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<PipelineCache, Dispatch>( pipelineCache, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineCache.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyPipelineCache( PipelineCache pipelineCache, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipelineCache( + static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineCache.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyPipelineCache( PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipelineCache && "Function <vkDestroyPipelineCache> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineCache.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( PipelineCache pipelineCache, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipelineCache( + static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + 
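+
+  // Usage note (illustrative only, not generated from the registry): the pipeline-cache wrappers in this
+  // file are typically combined as create -> use -> serialize. A minimal sketch, assuming exceptions are
+  // enabled, the default "vk" namespace, the default dispatcher/allocator arguments, and a valid `device`
+  // handle; reading and writing the cache blob to disk is left out:
+  //
+  //   vk::PipelineCacheCreateInfo cacheInfo{};        // optionally seed with a previously saved blob
+  //   auto cache = device.createPipelineCacheUnique( cacheInfo );
+  //
+  //   vk::ComputePipelineCreateInfo pipelineInfo{ /* stage, layout, ... */ };
+  //   auto pipeline = device.createComputePipelineUnique( *cache, pipelineInfo ).value;  // ResultValue -> handle
+  //
+  //   std::vector<uint8_t> blob = device.getPipelineCacheData( *cache );                 // persist for the next run
+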
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipelineCache, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineCache.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipelineCache && "Function <vkDestroyPipelineCache> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPipelineCacheData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineCacheData.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getPipelineCacheData( PipelineCache pipelineCache, size_t * pDataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkGetPipelineCacheData( static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineCacheData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineCacheData.html + template <typename Uint8_tAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type + Device::getPipelineCacheData( PipelineCache pipelineCache, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineCacheData && "Function <vkGetPipelineCacheData> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<uint8_t, Uint8_tAllocator> data; + size_t dataSize; + Result result; + do + { + result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast<Result>( + d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return detail::createResultValueType( result, std::move( data ) ); + } + + // wrapper function for command vkGetPipelineCacheData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineCacheData.html + template <typename Uint8_tAllocator, + typename Dispatch, + typename 
std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type + Device::getPipelineCacheData( PipelineCache pipelineCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineCacheData && "Function <vkGetPipelineCacheData> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator ); + size_t dataSize; + Result result; + do + { + result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast<Result>( + d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkMergePipelineCaches, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMergePipelineCaches.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergePipelineCaches( PipelineCache dstCache, + uint32_t srcCacheCount, + const PipelineCache * pSrcCaches, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkMergePipelineCaches( + static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkMergePipelineCaches, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMergePipelineCaches.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::mergePipelineCaches( PipelineCache dstCache, ArrayProxy<const PipelineCache> const & srcCaches, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkMergePipelineCaches && "Function <vkMergePipelineCaches> requires <VK_VERSION_1_0>" ); +# endif + + Result result = static_cast<Result>( d.vkMergePipelineCaches( + m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size(), reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateComputePipelines, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createComputePipelines( PipelineCache pipelineCache, + uint32_t createInfoCount, + const ComputePipelineCreateInfo * pCreateInfos, + const AllocationCallbacks * pAllocator, + Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateComputePipelines( static_cast<VkDevice>( m_device ), + static_cast<VkPipelineCache>( pipelineCache ), + createInfoCount, + reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkPipeline *>( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + template <typename PipelineAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, Pipeline>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> + Device::createComputePipelines( PipelineCache pipelineCache, + ArrayProxy<const ComputePipelineCreateInfo> const & createInfos, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() ); + Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<std::vector<Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); + } + + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + template <typename PipelineAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, Pipeline>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> + Device::createComputePipelines( PipelineCache pipelineCache, + ArrayProxy<const ComputePipelineCreateInfo> const & createInfos, + Optional<const AllocationCallbacks> allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function 
<vkCreateComputePipelines> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); + Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<std::vector<Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); + } + + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createComputePipeline( PipelineCache pipelineCache, + const ComputePipelineCreateInfo & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" ); +# endif + + Pipeline pipeline; + Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + 1, + reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( &pipeline ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<Pipeline>( result, std::move( pipeline ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + template <typename Dispatch, + typename PipelineAllocator, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> + Device::createComputePipelinesUnique( PipelineCache pipelineCache, + ArrayProxy<const ComputePipelineCreateInfo> const & createInfos, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<Pipeline> pipelines( createInfos.size() ); + Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( 
pipelines.data() ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); + } + return ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + template <typename Dispatch, + typename PipelineAllocator, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> + Device::createComputePipelinesUnique( PipelineCache pipelineCache, + ArrayProxy<const ComputePipelineCreateInfo> const & createInfos, + Optional<const AllocationCallbacks> allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<Pipeline> pipelines( createInfos.size() ); + Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); + } + return ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + // wrapper function for command vkCreateComputePipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateComputePipelines.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createComputePipelineUnique( + PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function <vkCreateComputePipelines> requires 
<VK_VERSION_1_0>" ); +# endif + + Pipeline pipeline; + Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + 1, + reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( &pipeline ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<UniqueHandle<Pipeline, Dispatch>>( + result, UniqueHandle<Pipeline, Dispatch>( pipeline, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipeline, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipeline.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyPipeline( Pipeline pipeline, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipeline( + static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipeline, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipeline.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyPipeline( Pipeline pipeline, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipeline && "Function <vkDestroyPipeline> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipeline, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipeline.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( Pipeline pipeline, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipeline( + static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipeline, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipeline.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( Pipeline pipeline, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + 
VULKAN_HPP_ASSERT( d.vkDestroyPipeline && "Function <vkDestroyPipeline> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreatePipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineLayout.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineLayout( const PipelineLayoutCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + PipelineLayout * pPipelineLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreatePipelineLayout( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreatePipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineLayout.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<PipelineLayout>::type + Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineLayout && "Function <vkCreatePipelineLayout> requires <VK_VERSION_1_0>" ); +# endif + + PipelineLayout pipelineLayout; + Result result = static_cast<Result>( d.vkCreatePipelineLayout( m_device, + reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" ); + + return detail::createResultValueType( result, std::move( pipelineLayout ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreatePipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineLayout.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<PipelineLayout, Dispatch>>::type + Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineLayout && "Function <vkCreatePipelineLayout> requires <VK_VERSION_1_0>" ); +# endif + + PipelineLayout pipelineLayout; + Result result = static_cast<Result>( d.vkCreatePipelineLayout( m_device, + reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + 
reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<PipelineLayout, Dispatch>( pipelineLayout, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineLayout.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyPipelineLayout( PipelineLayout pipelineLayout, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipelineLayout( + static_cast<VkDevice>( m_device ), static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineLayout.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyPipelineLayout( PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipelineLayout && "Function <vkDestroyPipelineLayout> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineLayout.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( PipelineLayout pipelineLayout, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipelineLayout( + static_cast<VkDevice>( m_device ), static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipelineLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineLayout.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipelineLayout && "Function <vkDestroyPipelineLayout> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( 
pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateSampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSampler.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSampler( const SamplerCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + Sampler * pSampler, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateSampler( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkSampler *>( pSampler ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateSampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSampler.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<Sampler>::type + Device::createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSampler && "Function <vkCreateSampler> requires <VK_VERSION_1_0>" ); +# endif + + Sampler sampler; + Result result = static_cast<Result>( d.vkCreateSampler( m_device, + reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSampler *>( &sampler ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" ); + + return detail::createResultValueType( result, std::move( sampler ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateSampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSampler.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Sampler, Dispatch>>::type + Device::createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSampler && "Function <vkCreateSampler> requires <VK_VERSION_1_0>" ); +# endif + + Sampler sampler; + Result result = static_cast<Result>( d.vkCreateSampler( m_device, + reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSampler *>( &sampler ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" ); + + return detail::createResultValueType( result, UniqueHandle<Sampler, Dispatch>( sampler, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command 
vkDestroySampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySampler.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroySampler( Sampler sampler, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySampler( static_cast<VkDevice>( m_device ), static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySampler.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroySampler( Sampler sampler, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySampler && "Function <vkDestroySampler> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySampler.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( Sampler sampler, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySampler( static_cast<VkDevice>( m_device ), static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySampler, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySampler.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( Sampler sampler, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySampler && "Function <vkDestroySampler> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateDescriptorSetLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorSetLayout.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + DescriptorSetLayout * pSetLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateDescriptorSetLayout( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDescriptorSetLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorSetLayout.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DescriptorSetLayout>::type Device::createDescriptorSetLayout( + const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorSetLayout && "Function <vkCreateDescriptorSetLayout> requires <VK_VERSION_1_0>" ); +# endif + + DescriptorSetLayout setLayout; + Result result = static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, + reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" ); + + return detail::createResultValueType( result, std::move( setLayout ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDescriptorSetLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorSetLayout.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorSetLayout, Dispatch>>::type Device::createDescriptorSetLayoutUnique( + const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorSetLayout && "Function <vkCreateDescriptorSetLayout> requires <VK_VERSION_1_0>" ); +# endif + + DescriptorSetLayout setLayout; + Result result = static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, + reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<DescriptorSetLayout, Dispatch>( setLayout, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDescriptorSetLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorSetLayout.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( DescriptorSetLayout 
descriptorSetLayout, + const AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorSetLayout( static_cast<VkDevice>( m_device ), + static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDescriptorSetLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorSetLayout.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDescriptorSetLayout && "Function <vkDestroyDescriptorSetLayout> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyDescriptorSetLayout( + m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDescriptorSetLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorSetLayout.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorSetLayout( static_cast<VkDevice>( m_device ), + static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDescriptorSetLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorSetLayout.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDescriptorSetLayout && "Function <vkDestroyDescriptorSetLayout> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyDescriptorSetLayout( + m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorPool( const DescriptorPoolCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + DescriptorPool * pDescriptorPool, + 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateDescriptorPool( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkDescriptorPool *>( pDescriptorPool ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DescriptorPool>::type + Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorPool && "Function <vkCreateDescriptorPool> requires <VK_VERSION_1_0>" ); +# endif + + DescriptorPool descriptorPool; + Result result = static_cast<Result>( d.vkCreateDescriptorPool( m_device, + reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" ); + + return detail::createResultValueType( result, std::move( descriptorPool ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorPool, Dispatch>>::type + Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorPool && "Function <vkCreateDescriptorPool> requires <VK_VERSION_1_0>" ); +# endif + + DescriptorPool descriptorPool; + Result result = static_cast<Result>( d.vkCreateDescriptorPool( m_device, + reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<DescriptorPool, Dispatch>( descriptorPool, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyDescriptorPool( DescriptorPool descriptorPool, const 
AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorPool( + static_cast<VkDevice>( m_device ), static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyDescriptorPool( DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDescriptorPool && "Function <vkDestroyDescriptorPool> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( DescriptorPool descriptorPool, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorPool( + static_cast<VkDevice>( m_device ), static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDescriptorPool && "Function <vkDestroyDescriptorPool> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkResetDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetDescriptorPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetDescriptorPool( DescriptorPool descriptorPool, + DescriptorPoolResetFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkResetDescriptorPool( + static_cast<VkDevice>( m_device ), 
static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) ); + } +#else + // wrapper function for command vkResetDescriptorPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetDescriptorPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkResetDescriptorPool && "Function <vkResetDescriptorPool> requires <VK_VERSION_1_0>" ); +# endif + + Result result = static_cast<Result>( + d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetDescriptorPool" ); + + return detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkAllocateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateDescriptorSets.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const DescriptorSetAllocateInfo * pAllocateInfo, + DescriptorSet * pDescriptorSets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkAllocateDescriptorSets( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ), + reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAllocateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateDescriptorSets.html + template <typename DescriptorSetAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DescriptorSetAllocator::value_type, DescriptorSet>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type + Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function <vkAllocateDescriptorSets> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount ); + Result result = static_cast<Result>( d.vkAllocateDescriptorSets( + m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); + + return detail::createResultValueType( result, std::move( descriptorSets ) ); + } + + // wrapper function for command vkAllocateDescriptorSets, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateDescriptorSets.html + template <typename DescriptorSetAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DescriptorSetAllocator::value_type, DescriptorSet>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type + Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function <vkAllocateDescriptorSets> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount, descriptorSetAllocator ); + Result result = static_cast<Result>( d.vkAllocateDescriptorSets( + m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); + + return detail::createResultValueType( result, std::move( descriptorSets ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkAllocateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateDescriptorSets.html + template <typename Dispatch, + typename DescriptorSetAllocator, + typename std::enable_if<std::is_same<typename DescriptorSetAllocator::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type + Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function <vkAllocateDescriptorSets> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount ); + Result result = static_cast<Result>( d.vkAllocateDescriptorSets( + m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); + std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets; + uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); + detail::PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d ); + for ( auto const & descriptorSet : descriptorSets ) + { + uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) ); + } + return detail::createResultValueType( result, std::move( uniqueDescriptorSets ) ); + } + + // wrapper function for command vkAllocateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAllocateDescriptorSets.html + template <typename Dispatch, + typename DescriptorSetAllocator, + 
typename std::enable_if<std::is_same<typename DescriptorSetAllocator::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type + Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, + DescriptorSetAllocator & descriptorSetAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function <vkAllocateDescriptorSets> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount ); + Result result = static_cast<Result>( d.vkAllocateDescriptorSets( + m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); + std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator ); + uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); + detail::PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d ); + for ( auto const & descriptorSet : descriptorSets ) + { + uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) ); + } + return detail::createResultValueType( result, std::move( uniqueDescriptorSets ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkFreeDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeDescriptorSets.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::freeDescriptorSets( DescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const DescriptorSet * pDescriptorSets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkFreeDescriptorSets( static_cast<VkDevice>( m_device ), + static_cast<VkDescriptorPool>( descriptorPool ), + descriptorSetCount, + reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkFreeDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeDescriptorSets.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::freeDescriptorSets( DescriptorPool descriptorPool, ArrayProxy<const DescriptorSet> const & descriptorSets, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkFreeDescriptorSets && "Function <vkFreeDescriptorSets> requires <VK_VERSION_1_0>" ); +# endif + + Result result = static_cast<Result>( d.vkFreeDescriptorSets( + m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), 
reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::freeDescriptorSets" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkFreeDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeDescriptorSets.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result( Device::free )( DescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const DescriptorSet * pDescriptorSets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkFreeDescriptorSets( static_cast<VkDevice>( m_device ), + static_cast<VkDescriptorPool>( descriptorPool ), + descriptorSetCount, + reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkFreeDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeDescriptorSets.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE + typename ResultValueType<void>::type( Device::free )( DescriptorPool descriptorPool, + ArrayProxy<const DescriptorSet> const & descriptorSets, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkFreeDescriptorSets && "Function <vkFreeDescriptorSets> requires <VK_VERSION_1_0>" ); +# endif + + Result result = static_cast<Result>( d.vkFreeDescriptorSets( + m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::( Device::free )" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUpdateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSets.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, + const WriteDescriptorSet * pDescriptorWrites, + uint32_t descriptorCopyCount, + const CopyDescriptorSet * pDescriptorCopies, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUpdateDescriptorSets( static_cast<VkDevice>( m_device ), + descriptorWriteCount, + reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ), + descriptorCopyCount, + reinterpret_cast<const VkCopyDescriptorSet *>( pDescriptorCopies ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUpdateDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSets.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy<const WriteDescriptorSet> const & descriptorWrites, + ArrayProxy<const CopyDescriptorSet> const & 
descriptorCopies, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUpdateDescriptorSets && "Function <vkUpdateDescriptorSets> requires <VK_VERSION_1_0>" ); +# endif + + d.vkUpdateDescriptorSets( m_device, + descriptorWrites.size(), + reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ), + descriptorCopies.size(), + reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindPipeline, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindPipeline.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindPipeline( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) ); + } + + // wrapper function for command vkCmdBindDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( PipelineBindPoint pipelineBindPoint, + PipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const DescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindDescriptorSets( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkPipelineBindPoint>( pipelineBindPoint ), + static_cast<VkPipelineLayout>( layout ), + firstSet, + descriptorSetCount, + reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ), + dynamicOffsetCount, + pDynamicOffsets ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindDescriptorSets, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( PipelineBindPoint pipelineBindPoint, + PipelineLayout layout, + uint32_t firstSet, + ArrayProxy<const DescriptorSet> const & descriptorSets, + ArrayProxy<const uint32_t> const & dynamicOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorSets && "Function <vkCmdBindDescriptorSets> requires <VK_VERSION_1_0>" ); +# endif + + d.vkCmdBindDescriptorSets( m_commandBuffer, + static_cast<VkPipelineBindPoint>( pipelineBindPoint ), + static_cast<VkPipelineLayout>( layout ), + firstSet, + descriptorSets.size(), + reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ), + dynamicOffsets.size(), + dynamicOffsets.data() ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdClearColorImage, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearColorImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( Image image, + ImageLayout imageLayout, + const ClearColorValue * pColor, + uint32_t rangeCount, + const ImageSubresourceRange * pRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdClearColorImage( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkImage>( image ), + static_cast<VkImageLayout>( imageLayout ), + reinterpret_cast<const VkClearColorValue *>( pColor ), + rangeCount, + reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdClearColorImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearColorImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( Image image, + ImageLayout imageLayout, + const ClearColorValue & color, + ArrayProxy<const ImageSubresourceRange> const & ranges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdClearColorImage && "Function <vkCmdClearColorImage> requires <VK_VERSION_1_0>" ); +# endif + + d.vkCmdClearColorImage( m_commandBuffer, + static_cast<VkImage>( image ), + static_cast<VkImageLayout>( imageLayout ), + reinterpret_cast<const VkClearColorValue *>( &color ), + ranges.size(), + reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDispatch, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatch.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatch( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); + } + + // wrapper function for command vkCmdDispatchIndirect, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchIndirect.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( Buffer buffer, DeviceSize offset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchIndirect( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) ); + } + + // wrapper function for command vkCmdSetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setEvent( Event event, PipelineStageFlags stageMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); + d.vkCmdSetEvent( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) ); + } + + // wrapper function for command vkCmdResetEvent, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResetEvent.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::resetEvent( Event event, PipelineStageFlags stageMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResetEvent( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) ); + } + + // wrapper function for command vkCmdWaitEvents, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, + const Event * pEvents, + PipelineStageFlags srcStageMask, + PipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const MemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWaitEvents( static_cast<VkCommandBuffer>( m_commandBuffer ), + eventCount, + reinterpret_cast<const VkEvent *>( pEvents ), + static_cast<VkPipelineStageFlags>( srcStageMask ), + static_cast<VkPipelineStageFlags>( dstStageMask ), + memoryBarrierCount, + reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ), + bufferMemoryBarrierCount, + reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ), + imageMemoryBarrierCount, + reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdWaitEvents, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy<const Event> const & events, + PipelineStageFlags srcStageMask, + PipelineStageFlags dstStageMask, + ArrayProxy<const MemoryBarrier> const & memoryBarriers, + ArrayProxy<const BufferMemoryBarrier> const & bufferMemoryBarriers, + ArrayProxy<const ImageMemoryBarrier> const & imageMemoryBarriers, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdWaitEvents && "Function <vkCmdWaitEvents> requires <VK_VERSION_1_0>" ); +# endif + + d.vkCmdWaitEvents( m_commandBuffer, + events.size(), + reinterpret_cast<const VkEvent *>( events.data() ), + static_cast<VkPipelineStageFlags>( srcStageMask ), + static_cast<VkPipelineStageFlags>( dstStageMask ), + memoryBarriers.size(), + reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), + bufferMemoryBarriers.size(), + reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), + imageMemoryBarriers.size(), + reinterpret_cast<const VkImageMemoryBarrier *>( 
imageMemoryBarriers.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushConstants, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pushConstants( + PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void * pValues, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushConstants( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkPipelineLayout>( layout ), + static_cast<VkShaderStageFlags>( stageFlags ), + offset, + size, + pValues ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushConstants, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants.html + template <typename ValuesType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pushConstants( PipelineLayout layout, + ShaderStageFlags stageFlags, + uint32_t offset, + ArrayProxy<const ValuesType> const & values, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushConstants && "Function <vkCmdPushConstants> requires <VK_VERSION_1_0>" ); +# endif + + d.vkCmdPushConstants( m_commandBuffer, + static_cast<VkPipelineLayout>( layout ), + static_cast<VkShaderStageFlags>( stageFlags ), + offset, + values.size() * sizeof( ValuesType ), + reinterpret_cast<const void *>( values.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( PipelineCache pipelineCache, + uint32_t createInfoCount, + const GraphicsPipelineCreateInfo * pCreateInfos, + const AllocationCallbacks * pAllocator, + Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateGraphicsPipelines( static_cast<VkDevice>( m_device ), + static_cast<VkPipelineCache>( pipelineCache ), + createInfoCount, + reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkPipeline *>( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + template <typename PipelineAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, Pipeline>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> + Device::createGraphicsPipelines( PipelineCache pipelineCache, + ArrayProxy<const GraphicsPipelineCreateInfo> const & 
createInfos, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() ); + Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<std::vector<Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); + } + + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + template <typename PipelineAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, Pipeline>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> + Device::createGraphicsPipelines( PipelineCache pipelineCache, + ArrayProxy<const GraphicsPipelineCreateInfo> const & createInfos, + Optional<const AllocationCallbacks> allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); + Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<std::vector<Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); + } + + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createGraphicsPipeline( PipelineCache pipelineCache, + const GraphicsPipelineCreateInfo & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" ); +# endif + + Pipeline pipeline; + 
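      // [Editorial note - not part of the generated header] This convenience overload forwards a single
      // GraphicsPipelineCreateInfo to vkCreateGraphicsPipelines with createInfoCount == 1. It returns
      // ResultValue<Pipeline> rather than ResultValueType, because Result::ePipelineCompileRequiredEXT is
      // accepted as a non-error success code (see the resultCheck call below), so the caller still has to
      // inspect the returned result. A minimal usage sketch, assuming an existing device, pipelineCache and a
      // filled-in createInfo, and the declaration's default allocator/dispatcher arguments (all names here are
      // illustrative, not taken from this file):
      //
      //   vk::ResultValue<vk::Pipeline> rv = device.createGraphicsPipeline( pipelineCache, createInfo );
      //   if ( rv.result == vk::Result::ePipelineCompileRequiredEXT )
      //   {
      //     // pipeline compilation was required; handle per VK_EXT_pipeline_creation_cache_control
      //   }
      //   vk::Pipeline pipeline = rv.value;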
Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + 1, + reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( &pipeline ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<Pipeline>( result, std::move( pipeline ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + template <typename Dispatch, + typename PipelineAllocator, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> + Device::createGraphicsPipelinesUnique( PipelineCache pipelineCache, + ArrayProxy<const GraphicsPipelineCreateInfo> const & createInfos, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<Pipeline> pipelines( createInfos.size() ); + Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); + } + return ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + template <typename Dispatch, + typename PipelineAllocator, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> + Device::createGraphicsPipelinesUnique( PipelineCache pipelineCache, + ArrayProxy<const GraphicsPipelineCreateInfo> const & createInfos, + Optional<const AllocationCallbacks> allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" ); +# endif + + std::vector<Pipeline> pipelines( createInfos.size() ); + Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); + } + return ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + // wrapper function for command vkCreateGraphicsPipelines, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateGraphicsPipelines.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createGraphicsPipelineUnique( + PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function <vkCreateGraphicsPipelines> requires <VK_VERSION_1_0>" ); +# endif + + Pipeline pipeline; + Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + 1, + reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( &pipeline ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<UniqueHandle<Pipeline, Dispatch>>( + result, UniqueHandle<Pipeline, Dispatch>( pipeline, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateFramebuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFramebuffer( const FramebufferCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + Framebuffer * pFramebuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateFramebuffer( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ), + 
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkFramebuffer *>( pFramebuffer ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateFramebuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<Framebuffer>::type + Device::createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateFramebuffer && "Function <vkCreateFramebuffer> requires <VK_VERSION_1_0>" ); +# endif + + Framebuffer framebuffer; + Result result = static_cast<Result>( d.vkCreateFramebuffer( m_device, + reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" ); + + return detail::createResultValueType( result, std::move( framebuffer ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateFramebuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Framebuffer, Dispatch>>::type + Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateFramebuffer && "Function <vkCreateFramebuffer> requires <VK_VERSION_1_0>" ); +# endif + + Framebuffer framebuffer; + Result result = static_cast<Result>( d.vkCreateFramebuffer( m_device, + reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<Framebuffer, Dispatch>( framebuffer, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFramebuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyFramebuffer( Framebuffer framebuffer, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyFramebuffer( + static_cast<VkDevice>( m_device ), static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for 
command vkDestroyFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFramebuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyFramebuffer( Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyFramebuffer && "Function <vkDestroyFramebuffer> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFramebuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( Framebuffer framebuffer, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyFramebuffer( + static_cast<VkDevice>( m_device ), static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyFramebuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFramebuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyFramebuffer && "Function <vkDestroyFramebuffer> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass( const RenderPassCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + RenderPass * pRenderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateRenderPass( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkRenderPassCreateInfo *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkRenderPass *>( pRenderPass ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<RenderPass>::type + Device::createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRenderPass && "Function <vkCreateRenderPass> requires <VK_VERSION_1_0>" ); +# endif + + RenderPass renderPass; + Result result = static_cast<Result>( d.vkCreateRenderPass( m_device, + reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" ); + + return detail::createResultValueType( result, std::move( renderPass ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<RenderPass, Dispatch>>::type + Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRenderPass && "Function <vkCreateRenderPass> requires <VK_VERSION_1_0>" ); +# endif + + RenderPass renderPass; + Result result = static_cast<Result>( d.vkCreateRenderPass( m_device, + reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<RenderPass, Dispatch>( renderPass, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyRenderPass.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyRenderPass( RenderPass renderPass, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyRenderPass( + static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyRenderPass.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyRenderPass( RenderPass renderPass, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyRenderPass && "Function <vkDestroyRenderPass> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyRenderPass.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( RenderPass renderPass, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyRenderPass( + static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyRenderPass.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( RenderPass renderPass, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyRenderPass && "Function <vkDestroyRenderPass> requires <VK_VERSION_1_0>" ); +# endif + + d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetRenderAreaGranularity, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderAreaGranularity.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( RenderPass renderPass, Extent2D * pGranularity, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetRenderAreaGranularity( static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRenderAreaGranularity, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderAreaGranularity.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Extent2D Device::getRenderAreaGranularity( RenderPass renderPass, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRenderAreaGranularity && "Function <vkGetRenderAreaGranularity> requires <VK_VERSION_1_0>" ); +# endif + + Extent2D granularity; + d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) ); + + return granularity; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // 
wrapper function for command vkCmdSetViewport, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewport.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const Viewport * pViewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewport( static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetViewport, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewport.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy<const Viewport> const & viewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetViewport && "Function <vkCmdSetViewport> requires <VK_VERSION_1_0>" ); +# endif + + d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetScissor, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissor.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::setScissor( uint32_t firstScissor, uint32_t scissorCount, const Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetScissor( static_cast<VkCommandBuffer>( m_commandBuffer ), firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetScissor, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissor.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy<const Rect2D> const & scissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetScissor && "Function <vkCmdSetScissor> requires <VK_VERSION_1_0>" ); +# endif + + d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetLineWidth, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineWidth.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLineWidth( static_cast<VkCommandBuffer>( m_commandBuffer 
), lineWidth ); + } + + // wrapper function for command vkCmdSetDepthBias, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBias.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBias( static_cast<VkCommandBuffer>( m_commandBuffer ), depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + } + + // wrapper function for command vkCmdSetBlendConstants, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetBlendConstants.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetBlendConstants( static_cast<VkCommandBuffer>( m_commandBuffer ), blendConstants ); + } + + // wrapper function for command vkCmdSetDepthBounds, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBounds.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBounds( static_cast<VkCommandBuffer>( m_commandBuffer ), minDepthBounds, maxDepthBounds ); + } + + // wrapper function for command vkCmdSetStencilCompareMask, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilCompareMask.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilCompareMask( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), compareMask ); + } + + // wrapper function for command vkCmdSetStencilWriteMask, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilWriteMask.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilWriteMask( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), writeMask ); + } + + // wrapper function for command vkCmdSetStencilReference, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilReference.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilReference( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), reference ); + } + + // wrapper function for command vkCmdBindIndexBuffer, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindIndexBuffer.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindIndexBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkBuffer>( buffer ), + static_cast<VkDeviceSize>( offset ), + static_cast<VkIndexType>( indexType ) ); + } + + // wrapper function for command vkCmdBindVertexBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( + uint32_t firstBinding, uint32_t bindingCount, const Buffer * pBuffers, const DeviceSize * pOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindVertexBuffers( static_cast<VkCommandBuffer>( m_commandBuffer ), + firstBinding, + bindingCount, + reinterpret_cast<const VkBuffer *>( pBuffers ), + reinterpret_cast<const VkDeviceSize *>( pOffsets ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindVertexBuffers, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, + ArrayProxy<const Buffer> const & buffers, + ArrayProxy<const DeviceSize> const & offsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindVertexBuffers && "Function <vkCmdBindVertexBuffers> requires <VK_VERSION_1_0>" ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindVertexBuffers( m_commandBuffer, + firstBinding, + buffers.size(), + reinterpret_cast<const VkBuffer *>( buffers.data() ), + reinterpret_cast<const VkDeviceSize *>( offsets.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDraw, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDraw.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::draw( + uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDraw( static_cast<VkCommandBuffer>( m_commandBuffer ), vertexCount, 
instanceCount, firstVertex, firstInstance ); + } + + // wrapper function for command vkCmdDrawIndexed, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexed.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndexed( static_cast<VkCommandBuffer>( m_commandBuffer ), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } + + // wrapper function for command vkCmdDrawIndirect, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirect.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndirect( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); + } + + // wrapper function for command vkCmdDrawIndexedIndirect, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexedIndirect.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndexedIndirect( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); + } + + // wrapper function for command vkCmdBlitImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::blitImage( Image srcImage, + ImageLayout srcImageLayout, + Image dstImage, + ImageLayout dstImageLayout, + uint32_t regionCount, + const ImageBlit * pRegions, + Filter filter, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBlitImage( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkImage>( srcImage ), + static_cast<VkImageLayout>( srcImageLayout ), + static_cast<VkImage>( dstImage ), + static_cast<VkImageLayout>( dstImageLayout ), + regionCount, + reinterpret_cast<const VkImageBlit *>( pRegions ), + static_cast<VkFilter>( filter ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBlitImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::blitImage( Image srcImage, + ImageLayout srcImageLayout, + Image dstImage, + ImageLayout dstImageLayout, + ArrayProxy<const ImageBlit> const & regions, + Filter filter, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBlitImage && "Function <vkCmdBlitImage> requires <VK_VERSION_1_0>" ); +# endif + + d.vkCmdBlitImage( m_commandBuffer, + static_cast<VkImage>( srcImage ), + static_cast<VkImageLayout>( srcImageLayout ), + static_cast<VkImage>( dstImage ), + static_cast<VkImageLayout>( dstImageLayout ), + regions.size(), + reinterpret_cast<const VkImageBlit *>( regions.data() ), + static_cast<VkFilter>( filter ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdClearDepthStencilImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearDepthStencilImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( Image image, + ImageLayout imageLayout, + const ClearDepthStencilValue * pDepthStencil, + uint32_t rangeCount, + const ImageSubresourceRange * pRanges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdClearDepthStencilImage( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkImage>( image ), + static_cast<VkImageLayout>( imageLayout ), + reinterpret_cast<const VkClearDepthStencilValue *>( pDepthStencil ), + rangeCount, + reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdClearDepthStencilImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearDepthStencilImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( Image image, + ImageLayout imageLayout, + const ClearDepthStencilValue & depthStencil, + ArrayProxy<const ImageSubresourceRange> const & ranges, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdClearDepthStencilImage && "Function <vkCmdClearDepthStencilImage> requires <VK_VERSION_1_0>" ); +# endif + + d.vkCmdClearDepthStencilImage( m_commandBuffer, + static_cast<VkImage>( image ), + static_cast<VkImageLayout>( imageLayout ), + reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ), + ranges.size(), + reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdClearAttachments, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearAttachments.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( + uint32_t attachmentCount, const ClearAttachment * pAttachments, uint32_t rectCount, const ClearRect * pRects, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdClearAttachments( static_cast<VkCommandBuffer>( m_commandBuffer ), + attachmentCount, + reinterpret_cast<const VkClearAttachment *>( pAttachments ), + rectCount, + reinterpret_cast<const VkClearRect *>( pRects ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function 
for command vkCmdClearAttachments, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdClearAttachments.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy<const ClearAttachment> const & attachments, + ArrayProxy<const ClearRect> const & rects, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdClearAttachments && "Function <vkCmdClearAttachments> requires <VK_VERSION_1_0>" ); +# endif + + d.vkCmdClearAttachments( m_commandBuffer, + attachments.size(), + reinterpret_cast<const VkClearAttachment *>( attachments.data() ), + rects.size(), + reinterpret_cast<const VkClearRect *>( rects.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdResolveImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::resolveImage( Image srcImage, + ImageLayout srcImageLayout, + Image dstImage, + ImageLayout dstImageLayout, + uint32_t regionCount, + const ImageResolve * pRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResolveImage( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkImage>( srcImage ), + static_cast<VkImageLayout>( srcImageLayout ), + static_cast<VkImage>( dstImage ), + static_cast<VkImageLayout>( dstImageLayout ), + regionCount, + reinterpret_cast<const VkImageResolve *>( pRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdResolveImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::resolveImage( Image srcImage, + ImageLayout srcImageLayout, + Image dstImage, + ImageLayout dstImageLayout, + ArrayProxy<const ImageResolve> const & regions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdResolveImage && "Function <vkCmdResolveImage> requires <VK_VERSION_1_0>" ); +# endif + + d.vkCmdResolveImage( m_commandBuffer, + static_cast<VkImage>( srcImage ), + static_cast<VkImageLayout>( srcImageLayout ), + static_cast<VkImage>( dstImage ), + static_cast<VkImageLayout>( dstImageLayout ), + regions.size(), + reinterpret_cast<const VkImageResolve *>( regions.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBeginRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::beginRenderPass( const RenderPassBeginInfo * pRenderPassBegin, SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginRenderPass( static_cast<VkCommandBuffer>( m_commandBuffer ), + 
reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), + static_cast<VkSubpassContents>( contents ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin, SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginRenderPass && "Function <vkCmdBeginRenderPass> requires <VK_VERSION_1_0>" ); +# endif + + d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdNextSubpass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdNextSubpass.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdNextSubpass( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkSubpassContents>( contents ) ); + } + + // wrapper function for command vkCmdEndRenderPass, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderPass.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndRenderPass( static_cast<VkCommandBuffer>( m_commandBuffer ) ); + } + + //=== VK_VERSION_1_1 === + + // wrapper function for command vkEnumerateInstanceVersion, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceVersion.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t * pApiVersion, Dispatch const & d ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumerateInstanceVersion, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumerateInstanceVersion.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateInstanceVersion && "Function <vkEnumerateInstanceVersion> requires <VK_VERSION_1_1>" ); +# endif + + uint32_t apiVersion; + Result result = static_cast<Result>( d.vkEnumerateInstanceVersion( &apiVersion ) ); + detail::resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" ); + + return detail::createResultValueType( result, std::move( apiVersion ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBindBufferMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, + const BindBufferMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkBindBufferMemory2( static_cast<VkDevice>( m_device ), bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindBufferMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::bindBufferMemory2( ArrayProxy<const BindBufferMemoryInfo> const & bindInfos, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindBufferMemory2 && "Function <vkBindBufferMemory2> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" ); +# endif + + Result result = + static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBindImageMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, + const BindImageMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkBindImageMemory2( static_cast<VkDevice>( m_device ), bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindImageMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::bindImageMemory2( ArrayProxy<const BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindImageMemory2 && "Function <vkBindImageMemory2> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" ); +# endif + + Result result = + static_cast<Result>( d.vkBindImageMemory2( m_device, 
bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceGroupPeerMemoryFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPeerMemoryFeatures.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceGroupPeerMemoryFeatures( + static_cast<VkDevice>( m_device ), heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceGroupPeerMemoryFeatures, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPeerMemoryFeatures.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceGroupPeerMemoryFeatures && + "Function <vkGetDeviceGroupPeerMemoryFeatures> requires <VK_KHR_device_group> or <VK_VERSION_1_1>" ); +# endif + + PeerMemoryFeatureFlags peerMemoryFeatures; + d.vkGetDeviceGroupPeerMemoryFeatures( + m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) ); + + return peerMemoryFeatures; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetDeviceMask, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDeviceMask.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDeviceMask( static_cast<VkCommandBuffer>( m_commandBuffer ), deviceMask ); + } + + // wrapper function for command vkEnumeratePhysicalDeviceGroups, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceGroups.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( uint32_t * pPhysicalDeviceGroupCount, + PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( static_cast<VkInstance>( m_instance ), + pPhysicalDeviceGroupCount, + 
reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumeratePhysicalDeviceGroups, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceGroups.html + template <typename PhysicalDeviceGroupPropertiesAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PhysicalDeviceGroupPropertiesAllocator::value_type, PhysicalDeviceGroupProperties>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type + Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroups && + "Function <vkEnumeratePhysicalDeviceGroups> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" ); +# endif + + std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( + m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); + } + + // wrapper function for command vkEnumeratePhysicalDeviceGroups, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceGroups.html + template <typename PhysicalDeviceGroupPropertiesAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PhysicalDeviceGroupPropertiesAllocator::value_type, PhysicalDeviceGroupProperties>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type + Instance::enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroups && + "Function <vkEnumeratePhysicalDeviceGroups> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" ); +# endif + + std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties( physicalDeviceGroupPropertiesAllocator ); + uint32_t 
physicalDeviceGroupCount; + Result result; + do + { + result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( + m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 * pInfo, + MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageMemoryRequirements2( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), + reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE MemoryRequirements2 Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2 && + "Function <vkGetImageMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); +# endif + + MemoryRequirements2 memoryRequirements; + d.vkGetImageMemoryRequirements2( + m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetImageMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2 && + "Function <vkGetImageMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + MemoryRequirements2 & memoryRequirements = structureChain.template get<MemoryRequirements2>(); + d.vkGetImageMemoryRequirements2( + m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetBufferMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 * pInfo, + MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetBufferMemoryRequirements2( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), + reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE MemoryRequirements2 Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2 && + "Function <vkGetBufferMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); +# endif + + MemoryRequirements2 memoryRequirements; + d.vkGetBufferMemoryRequirements2( + m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetBufferMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2 && + "Function <vkGetBufferMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + MemoryRequirements2 & memoryRequirements = structureChain.template get<MemoryRequirements2>(); + d.vkGetBufferMemoryRequirements2( + m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageSparseMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSparseMemoryRequirements2( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ), + pSparseMemoryRequirementCount, + reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageSparseMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2.html + template < + typename SparseImageMemoryRequirements2Allocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, SparseImageMemoryRequirements2>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> + Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2 && + "Function <vkGetImageSparseMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); +# endif + + std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2( + m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2( m_device, + reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), + 
&sparseMemoryRequirementCount, + reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + // wrapper function for command vkGetImageSparseMemoryRequirements2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2.html + template < + typename SparseImageMemoryRequirements2Allocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, SparseImageMemoryRequirements2>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> + Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2 && + "Function <vkGetImageSparseMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); +# endif + + std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( sparseImageMemoryRequirements2Allocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2( + m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2( m_device, + reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceFeatures2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceFeatures2( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceFeatures2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2 && + "Function <vkGetPhysicalDeviceFeatures2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + PhysicalDeviceFeatures2 features; + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) ); + + return features; + } + + // wrapper function for command vkGetPhysicalDeviceFeatures2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2.html + template <typename X, typename Y, typename... Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2 && + "Function <vkGetPhysicalDeviceFeatures2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + PhysicalDeviceFeatures2 & features = structureChain.template get<PhysicalDeviceFeatures2>(); + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( PhysicalDeviceProperties2 * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PhysicalDeviceProperties2 PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2 && + "Function <vkGetPhysicalDeviceProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + PhysicalDeviceProperties2 properties; + d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); + + return properties; + } + + // wrapper function for command vkGetPhysicalDeviceProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2 && + "Function <vkGetPhysicalDeviceProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + PhysicalDeviceProperties2 & properties = structureChain.template get<PhysicalDeviceProperties2>(); + d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + PhysicalDevice::getFormatProperties2( Format format, FormatProperties2 * pFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceFormatProperties2( + static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE FormatProperties2 PhysicalDevice::getFormatProperties2( Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2 && + "Function <vkGetPhysicalDeviceFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + FormatProperties2 formatProperties; + d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); + + return formatProperties; + } + + // wrapper function for command vkGetPhysicalDeviceFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2( Format format, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2 && + "Function <vkGetPhysicalDeviceFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + FormatProperties2 & formatProperties = structureChain.template get<FormatProperties2>(); + d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ), + reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<ImageFormatProperties2>::type + PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2 && + "Function <vkGetPhysicalDeviceImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + ImageFormatProperties2 imageFormatProperties; + Result result = + static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), + reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + + return detail::createResultValueType( result, std::move( imageFormatProperties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type + PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2 && + "Function <vkGetPhysicalDeviceImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + ImageFormatProperties2 & imageFormatProperties = structureChain.template get<ImageFormatProperties2>(); + Result result = + static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), + reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + + return detail::createResultValueType( result, std::move( structureChain ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, + QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceQueueFamilyProperties2( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2.html + template <typename QueueFamilyProperties2Allocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename QueueFamilyProperties2Allocator::value_type, QueueFamilyProperties2>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> + PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 && + "Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, &queueFamilyPropertyCount, 
reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2.html + template <typename QueueFamilyProperties2Allocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename QueueFamilyProperties2Allocator::value_type, QueueFamilyProperties2>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> + PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 && + "Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator ); + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2.html + template <typename StructureChain, + typename StructureChainAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> + PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 && + "Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + std::vector<StructureChain, StructureChainAllocator> structureChains; + std::vector<QueueFamilyProperties2> queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + structureChains.resize( queueFamilyPropertyCount ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i 
< queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = structureChains[i].template get<QueueFamilyProperties2>().pNext; + } + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + structureChains.resize( queueFamilyPropertyCount ); + } + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + structureChains[i].template get<QueueFamilyProperties2>() = queueFamilyProperties[i]; + } + return structureChains; + } + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2.html + template <typename StructureChain, + typename StructureChainAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> + PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 && + "Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator ); + std::vector<QueueFamilyProperties2> queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + structureChains.resize( queueFamilyPropertyCount ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = structureChains[i].template get<QueueFamilyProperties2>().pNext; + } + d.vkGetPhysicalDeviceQueueFamilyProperties2( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + structureChains.resize( queueFamilyPropertyCount ); + } + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + structureChains[i].template get<QueueFamilyProperties2>() = queueFamilyProperties[i]; + } + return structureChains; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceMemoryProperties2( static_cast<VkPhysicalDevice>( 
m_physicalDevice ), + reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2 && + "Function <vkGetPhysicalDeviceMemoryProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + PhysicalDeviceMemoryProperties2 memoryProperties; + d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) ); + + return memoryProperties; + } + + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2.html + template <typename X, typename Y, typename... Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2 && + "Function <vkGetPhysicalDeviceMemoryProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + PhysicalDeviceMemoryProperties2 & memoryProperties = structureChain.template get<PhysicalDeviceMemoryProperties2>(); + d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + SparseImageFormatProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ), + pPropertyCount, + reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2.html + template <typename 
SparseImageFormatProperties2Allocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename SparseImageFormatProperties2Allocator::value_type, SparseImageFormatProperties2>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> + PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2 && + "Function <vkGetPhysicalDeviceSparseImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), + &propertyCount, + reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) ); + + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2.html + template <typename SparseImageFormatProperties2Allocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename SparseImageFormatProperties2Allocator::value_type, SparseImageFormatProperties2>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> + PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2 && + "Function <vkGetPhysicalDeviceSparseImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator ); + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2( + m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), + &propertyCount, + reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) ); + + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( 
propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkTrimCommandPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTrimCommandPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::trimCommandPool( CommandPool commandPool, CommandPoolTrimFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkTrimCommandPool( static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) ); + } + + // wrapper function for command vkGetDeviceQueue2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceQueue2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getQueue2( const DeviceQueueInfo2 * pQueueInfo, Queue * pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceQueue2( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceQueue2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceQueue2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Queue Device::getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceQueue2 && "Function <vkGetDeviceQueue2> requires <VK_VERSION_1_1>" ); +# endif + + Queue queue; + d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) ); + + return queue; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceExternalBufferProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalBufferProperties.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + ExternalBufferProperties * pExternalBufferProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceExternalBufferProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), + reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceExternalBufferProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalBufferProperties.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ExternalBufferProperties + PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalBufferProperties && + "Function <vkGetPhysicalDeviceExternalBufferProperties> requires <VK_KHR_external_memory_capabilities> or <VK_VERSION_1_1>" ); +# endif + + ExternalBufferProperties externalBufferProperties; + d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), + reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) ); + + return externalBufferProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceExternalFenceProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalFenceProperties.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + ExternalFenceProperties * pExternalFenceProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceExternalFenceProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), + reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceExternalFenceProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalFenceProperties.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ExternalFenceProperties + PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalFenceProperties && + "Function <vkGetPhysicalDeviceExternalFenceProperties> requires <VK_KHR_external_fence_capabilities> or <VK_VERSION_1_1>" ); +# endif + + ExternalFenceProperties externalFenceProperties; + d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), + reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) ); + + return externalFenceProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceExternalSemaphoreProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalSemaphoreProperties.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + ExternalSemaphoreProperties * 
pExternalSemaphoreProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceExternalSemaphoreProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ), + reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceExternalSemaphoreProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalSemaphoreProperties.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties( + const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalSemaphoreProperties && + "Function <vkGetPhysicalDeviceExternalSemaphoreProperties> requires <VK_KHR_external_semaphore_capabilities> or <VK_VERSION_1_1>" ); +# endif + + ExternalSemaphoreProperties externalSemaphoreProperties; + d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), + reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) ); + + return externalSemaphoreProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDispatchBase, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchBase.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchBase( static_cast<VkCommandBuffer>( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + + // wrapper function for command vkCreateDescriptorUpdateTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplate.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + DescriptorUpdateTemplate * pDescriptorUpdateTemplate, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command 
vkCreateDescriptorUpdateTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplate.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplate( + const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplate && + "Function <vkCreateDescriptorUpdateTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); +# endif + + DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, + reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" ); + + return detail::createResultValueType( result, std::move( descriptorUpdateTemplate ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDescriptorUpdateTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplate.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate, Dispatch>>::type + Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplate && + "Function <vkCreateDescriptorUpdateTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); +# endif + + DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, + reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<DescriptorUpdateTemplate, Dispatch>( descriptorUpdateTemplate, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDescriptorUpdateTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorUpdateTemplate.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( DescriptorUpdateTemplate descriptorUpdateTemplate, + const AllocationCallbacks * pAllocator, + 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorUpdateTemplate( static_cast<VkDevice>( m_device ), + static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDescriptorUpdateTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorUpdateTemplate.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDescriptorUpdateTemplate && + "Function <vkDestroyDescriptorUpdateTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); +# endif + + d.vkDestroyDescriptorUpdateTemplate( + m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDescriptorUpdateTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorUpdateTemplate.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorUpdateTemplate( static_cast<VkDevice>( m_device ), + static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDescriptorUpdateTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorUpdateTemplate.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDescriptorUpdateTemplate && + "Function <vkDestroyDescriptorUpdateTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); +# endif + + d.vkDestroyDescriptorUpdateTemplate( + m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUpdateDescriptorSetWithTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSetWithTemplate.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + 
VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( DescriptorSet descriptorSet, + DescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUpdateDescriptorSetWithTemplate( static_cast<VkDevice>( m_device ), + static_cast<VkDescriptorSet>( descriptorSet ), + static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), + pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUpdateDescriptorSetWithTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSetWithTemplate.html + template <typename DataType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( DescriptorSet descriptorSet, + DescriptorUpdateTemplate descriptorUpdateTemplate, + DataType const & data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUpdateDescriptorSetWithTemplate && + "Function <vkUpdateDescriptorSetWithTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); +# endif + + d.vkUpdateDescriptorSetWithTemplate( m_device, + static_cast<VkDescriptorSet>( descriptorSet ), + static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), + reinterpret_cast<const void *>( &data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDescriptorSetLayoutSupport, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupport.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo * pCreateInfo, + DescriptorSetLayoutSupport * pSupport, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDescriptorSetLayoutSupport( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), + reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDescriptorSetLayoutSupport, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupport.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupport && "Function <vkGetDescriptorSetLayoutSupport> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" ); +# endif + + DescriptorSetLayoutSupport support; + d.vkGetDescriptorSetLayoutSupport( + m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) ); + + return support; + } + + // wrapper function for command 
vkGetDescriptorSetLayoutSupport, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupport.html + template <typename X, typename Y, typename... Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupport && "Function <vkGetDescriptorSetLayoutSupport> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + DescriptorSetLayoutSupport & support = structureChain.template get<DescriptorSetLayoutSupport>(); + d.vkGetDescriptorSetLayoutSupport( + m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateSamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversion.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + SamplerYcbcrConversion * pYcbcrConversion, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateSamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversion.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversion( + const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversion && + "Function <vkCreateSamplerYcbcrConversion> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" ); +# endif + + SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, + reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" ); + + return detail::createResultValueType( result, std::move( 
ycbcrConversion ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateSamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversion.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SamplerYcbcrConversion, Dispatch>>::type + Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversion && + "Function <vkCreateSamplerYcbcrConversion> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" ); +# endif + + SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, + reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<SamplerYcbcrConversion, Dispatch>( ycbcrConversion, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySamplerYcbcrConversion.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( SamplerYcbcrConversion ycbcrConversion, + const AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySamplerYcbcrConversion( static_cast<VkDevice>( m_device ), + static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySamplerYcbcrConversion.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( SamplerYcbcrConversion ycbcrConversion, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySamplerYcbcrConversion && + "Function <vkDestroySamplerYcbcrConversion> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" ); +# endif + + d.vkDestroySamplerYcbcrConversion( + m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command 
vkDestroySamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySamplerYcbcrConversion.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySamplerYcbcrConversion( static_cast<VkDevice>( m_device ), + static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySamplerYcbcrConversion, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySamplerYcbcrConversion.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySamplerYcbcrConversion && + "Function <vkDestroySamplerYcbcrConversion> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" ); +# endif + + d.vkDestroySamplerYcbcrConversion( + m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_2 === + + // wrapper function for command vkResetQueryPool, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetQueryPool.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkResetQueryPool( static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount ); + } + + // wrapper function for command vkGetSemaphoreCounterValue, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreCounterValue.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( Semaphore semaphore, + uint64_t * pValue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetSemaphoreCounterValue( static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( semaphore ), pValue ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSemaphoreCounterValue, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreCounterValue.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValue( Semaphore semaphore, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() 
== VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSemaphoreCounterValue && "Function <vkGetSemaphoreCounterValue> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); +# endif + + uint64_t value; + Result result = static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" ); + + return detail::createResultValueType( result, std::move( value ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkWaitSemaphores, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitSemaphores.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const SemaphoreWaitInfo * pWaitInfo, + uint64_t timeout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkWaitSemaphores( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkWaitSemaphores, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitSemaphores.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWaitSemaphores && "Function <vkWaitSemaphores> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); +# endif + + Result result = static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { Result::eSuccess, Result::eTimeout } ); + + return static_cast<Result>( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkSignalSemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSignalSemaphore.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( const SemaphoreSignalInfo * pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkSignalSemaphore( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSignalSemaphore, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSignalSemaphore.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSignalSemaphore && "Function <vkSignalSemaphore> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); +# endif + + Result result = static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetBufferDeviceAddress, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddress.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<DeviceAddress>( + d.vkGetBufferDeviceAddress( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferDeviceAddress, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddress.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferDeviceAddress && + "Function <vkGetBufferDeviceAddress> requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); +# endif + + VkDeviceAddress result = d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); + + return static_cast<DeviceAddress>( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetBufferOpaqueCaptureAddress, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureAddress.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetBufferOpaqueCaptureAddress( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferOpaqueCaptureAddress, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureAddress.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferOpaqueCaptureAddress && + "Function <vkGetBufferOpaqueCaptureAddress> requires 
<VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); +# endif + + uint64_t result = d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); + + return result; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceMemoryOpaqueCaptureAddress, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryOpaqueCaptureAddress.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetDeviceMemoryOpaqueCaptureAddress( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceMemoryOpaqueCaptureAddress, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryOpaqueCaptureAddress.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceMemoryOpaqueCaptureAddress && + "Function <vkGetDeviceMemoryOpaqueCaptureAddress> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); +# endif + + uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) ); + + return result; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDrawIndirectCount, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectCount.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( Buffer buffer, + DeviceSize offset, + Buffer countBuffer, + DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndirectCount( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkBuffer>( buffer ), + static_cast<VkDeviceSize>( offset ), + static_cast<VkBuffer>( countBuffer ), + static_cast<VkDeviceSize>( countBufferOffset ), + maxDrawCount, + stride ); + } + + // wrapper function for command vkCmdDrawIndexedIndirectCount, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexedIndirectCount.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( Buffer buffer, + DeviceSize offset, + Buffer countBuffer, + DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndexedIndirectCount( static_cast<VkCommandBuffer>( m_commandBuffer ), + 
static_cast<VkBuffer>( buffer ), + static_cast<VkDeviceSize>( offset ), + static_cast<VkBuffer>( countBuffer ), + static_cast<VkDeviceSize>( countBufferOffset ), + maxDrawCount, + stride ); + } + + // wrapper function for command vkCreateRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2( const RenderPassCreateInfo2 * pCreateInfo, + const AllocationCallbacks * pAllocator, + RenderPass * pRenderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateRenderPass2( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkRenderPass *>( pRenderPass ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<RenderPass>::type + Device::createRenderPass2( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRenderPass2 && "Function <vkCreateRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); +# endif + + RenderPass renderPass; + Result result = static_cast<Result>( d.vkCreateRenderPass2( m_device, + reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" ); + + return detail::createResultValueType( result, std::move( renderPass ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<RenderPass, Dispatch>>::type + Device::createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRenderPass2 && "Function <vkCreateRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); +# endif + + RenderPass renderPass; + Result result = static_cast<Result>( d.vkCreateRenderPass2( m_device, + reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" ); + + return detail::createResultValueType( 
result, + UniqueHandle<RenderPass, Dispatch>( renderPass, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBeginRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const RenderPassBeginInfo * pRenderPassBegin, + const SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginRenderPass2( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), + reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, + const SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginRenderPass2 && "Function <vkCmdBeginRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); +# endif + + d.vkCmdBeginRenderPass2( + m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdNextSubpass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdNextSubpass2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const SubpassBeginInfo * pSubpassBeginInfo, + const SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdNextSubpass2( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), + reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdNextSubpass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdNextSubpass2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, + const SubpassEndInfo & subpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdNextSubpass2 && "Function <vkCmdNextSubpass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); +# endif + + d.vkCmdNextSubpass2( + m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( 
&subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderPass2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const SubpassEndInfo * pSubpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndRenderPass2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdEndRenderPass2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderPass2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdEndRenderPass2 && "Function <vkCmdEndRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); +# endif + + d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_3 === + + // wrapper function for command vkGetPhysicalDeviceToolProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceToolProperties.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolProperties( uint32_t * pToolCount, + PhysicalDeviceToolProperties * pToolProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceToolProperties( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceToolProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceToolProperties.html + template <typename PhysicalDeviceToolPropertiesAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PhysicalDeviceToolPropertiesAllocator::value_type, PhysicalDeviceToolProperties>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type + PhysicalDevice::getToolProperties( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolProperties && + "Function <vkGetPhysicalDeviceToolProperties> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" ); +# endif + + std::vector<PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> 
toolProperties; + uint32_t toolCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && toolCount ) + { + toolProperties.resize( toolCount ); + result = static_cast<Result>( + d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); + VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); + if ( toolCount < toolProperties.size() ) + { + toolProperties.resize( toolCount ); + } + return detail::createResultValueType( result, std::move( toolProperties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceToolProperties, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceToolProperties.html + template <typename PhysicalDeviceToolPropertiesAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PhysicalDeviceToolPropertiesAllocator::value_type, PhysicalDeviceToolProperties>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type + PhysicalDevice::getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolProperties && + "Function <vkGetPhysicalDeviceToolProperties> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" ); +# endif + + std::vector<PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties( physicalDeviceToolPropertiesAllocator ); + uint32_t toolCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && toolCount ) + { + toolProperties.resize( toolCount ); + result = static_cast<Result>( + d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); + VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); + if ( toolCount < toolProperties.size() ) + { + toolProperties.resize( toolCount ); + } + return detail::createResultValueType( result, std::move( toolProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreatePrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlot.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlot( const PrivateDataSlotCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + PrivateDataSlot * pPrivateDataSlot, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( 
d.vkCreatePrivateDataSlot( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreatePrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlot.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<PrivateDataSlot>::type + Device::createPrivateDataSlot( const PrivateDataSlotCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlot && "Function <vkCreatePrivateDataSlot> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); +# endif + + PrivateDataSlot privateDataSlot; + Result result = static_cast<Result>( d.vkCreatePrivateDataSlot( m_device, + reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" ); + + return detail::createResultValueType( result, std::move( privateDataSlot ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreatePrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlot.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<PrivateDataSlot, Dispatch>>::type + Device::createPrivateDataSlotUnique( const PrivateDataSlotCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlot && "Function <vkCreatePrivateDataSlot> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); +# endif + + PrivateDataSlot privateDataSlot; + Result result = static_cast<Result>( d.vkCreatePrivateDataSlot( m_device, + reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<PrivateDataSlot, Dispatch>( privateDataSlot, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPrivateDataSlot.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyPrivateDataSlot( PrivateDataSlot privateDataSlot, const AllocationCallbacks * pAllocator, Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPrivateDataSlot( + static_cast<VkDevice>( m_device ), static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPrivateDataSlot.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( PrivateDataSlot privateDataSlot, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPrivateDataSlot && "Function <vkDestroyPrivateDataSlot> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); +# endif + + d.vkDestroyPrivateDataSlot( + m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPrivateDataSlot.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( PrivateDataSlot privateDataSlot, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPrivateDataSlot( + static_cast<VkDevice>( m_device ), static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPrivateDataSlot, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPrivateDataSlot.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( PrivateDataSlot privateDataSlot, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPrivateDataSlot && "Function <vkDestroyPrivateDataSlot> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); +# endif + + d.vkDestroyPrivateDataSlot( + m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetPrivateData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetPrivateData.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateData( + ObjectType objectType_, uint64_t objectHandle, PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkSetPrivateData( + 
static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); + } +#else + // wrapper function for command vkSetPrivateData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetPrivateData.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::setPrivateData( ObjectType objectType_, uint64_t objectHandle, PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetPrivateData && "Function <vkSetPrivateData> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); +# endif + + Result result = static_cast<Result>( + d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" ); + + return detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkGetPrivateData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPrivateData.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getPrivateData( + ObjectType objectType_, uint64_t objectHandle, PrivateDataSlot privateDataSlot, uint64_t * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPrivateData( + static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPrivateData, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPrivateData.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t + Device::getPrivateData( ObjectType objectType_, uint64_t objectHandle, PrivateDataSlot privateDataSlot, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPrivateData && "Function <vkGetPrivateData> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); +# endif + + uint64_t data; + d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data ); + + return data; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPipelineBarrier2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const DependencyInfo * pDependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPipelineBarrier2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const 
VkDependencyInfo *>( pDependencyInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPipelineBarrier2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const DependencyInfo & dependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPipelineBarrier2 && "Function <vkCmdPipelineBarrier2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); +# endif + + d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdWriteTimestamp2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteTimestamp2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::writeTimestamp2( PipelineStageFlags2 stage, QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteTimestamp2( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query ); + } + + // wrapper function for command vkQueueSubmit2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Queue::submit2( uint32_t submitCount, const SubmitInfo2 * pSubmits, Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkQueueSubmit2( static_cast<VkQueue>( m_queue ), submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueSubmit2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Queue::submit2( ArrayProxy<const SubmitInfo2> const & submits, Fence fence, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueSubmit2 && "Function <vkQueueSubmit2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); +# endif + + Result result = static_cast<Result>( + d.vkQueueSubmit2( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyBuffer2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer2.html + template <typename 
Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const CopyBufferInfo2 * pCopyBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyBuffer2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const CopyBufferInfo2 & copyBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyBuffer2 && "Function <vkCmdCopyBuffer2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); +# endif + + d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const CopyImageInfo2 * pCopyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const CopyImageInfo2 & copyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyImage2 && "Function <vkCmdCopyImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); +# endif + + d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyBufferToImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const CopyBufferToImageInfo2 * pCopyBufferToImageInfo, +                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyBufferToImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyBufferToImage2, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const CopyBufferToImageInfo2 & copyBufferToImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyBufferToImage2 && "Function <vkCmdCopyBufferToImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); +# endif + + d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyImageToBuffer2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const CopyImageToBufferInfo2 * pCopyImageToBufferInfo, +                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImageToBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyImageToBuffer2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const CopyImageToBufferInfo2 & copyImageToBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyImageToBuffer2 && "Function <vkCmdCopyImageToBuffer2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); +# endif + + d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceBufferMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirements.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( const DeviceBufferMemoryRequirements * pInfo, +                                                              MemoryRequirements2 * pMemoryRequirements, +                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceBufferMemoryRequirements( static_cast<VkDevice>( m_device ), +                                           reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), +                                           reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceBufferMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirements.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
MemoryRequirements2 Device::getBufferMemoryRequirements( const DeviceBufferMemoryRequirements & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirements && + "Function <vkGetDeviceBufferMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); +# endif + + MemoryRequirements2 memoryRequirements; + d.vkGetDeviceBufferMemoryRequirements( + m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetDeviceBufferMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirements.html + template <typename X, typename Y, typename... Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements( const DeviceBufferMemoryRequirements & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirements && + "Function <vkGetDeviceBufferMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + MemoryRequirements2 & memoryRequirements = structureChain.template get<MemoryRequirements2>(); + d.vkGetDeviceBufferMemoryRequirements( + m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceImageMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirements.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( const DeviceImageMemoryRequirements * pInfo, + MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceImageMemoryRequirements( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), + reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceImageMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirements.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE MemoryRequirements2 Device::getImageMemoryRequirements( const DeviceImageMemoryRequirements & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirements && + "Function <vkGetDeviceImageMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); +# 
endif + + MemoryRequirements2 memoryRequirements; + d.vkGetDeviceImageMemoryRequirements( + m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetDeviceImageMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirements.html + template <typename X, typename Y, typename... Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements( const DeviceImageMemoryRequirements & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirements && + "Function <vkGetDeviceImageMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + MemoryRequirements2 & memoryRequirements = structureChain.template get<MemoryRequirements2>(); + d.vkGetDeviceImageMemoryRequirements( + m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceImageSparseMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirements.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( const DeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceImageSparseMemoryRequirements( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), + pSparseMemoryRequirementCount, + reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceImageSparseMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirements.html + template < + typename SparseImageMemoryRequirements2Allocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, SparseImageMemoryRequirements2>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> + Device::getImageSparseMemoryRequirements( const DeviceImageMemoryRequirements & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirements && + "Function <vkGetDeviceImageSparseMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); +# endif + + 
std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetDeviceImageSparseMemoryRequirements( + m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetDeviceImageSparseMemoryRequirements( m_device, + reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + // wrapper function for command vkGetDeviceImageSparseMemoryRequirements, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirements.html + template < + typename SparseImageMemoryRequirements2Allocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, SparseImageMemoryRequirements2>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> + Device::getImageSparseMemoryRequirements( const DeviceImageMemoryRequirements & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirements && + "Function <vkGetDeviceImageSparseMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); +# endif + + std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( sparseImageMemoryRequirements2Allocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetDeviceImageSparseMemoryRequirements( + m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetDeviceImageSparseMemoryRequirements( m_device, + reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetEvent2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setEvent2( Event event, const DependencyInfo * pDependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
d.vkCmdSetEvent2( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetEvent2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setEvent2( Event event, const DependencyInfo & dependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetEvent2 && "Function <vkCmdSetEvent2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); +# endif + + d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdResetEvent2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResetEvent2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( Event event, PipelineStageFlags2 stageMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResetEvent2( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) ); + } + + // wrapper function for command vkCmdWaitEvents2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( uint32_t eventCount, + const Event * pEvents, + const DependencyInfo * pDependencyInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWaitEvents2( static_cast<VkCommandBuffer>( m_commandBuffer ), + eventCount, + reinterpret_cast<const VkEvent *>( pEvents ), + reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdWaitEvents2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( ArrayProxy<const Event> const & events, + ArrayProxy<const DependencyInfo> const & dependencyInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdWaitEvents2 && "Function <vkCmdWaitEvents2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); +# else + if ( events.size() != dependencyInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdWaitEvents2( m_commandBuffer, + events.size(), + 
reinterpret_cast<const VkEvent *>( events.data() ), + reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBlitImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const BlitImageInfo2 * pBlitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBlitImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBlitImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const BlitImageInfo2 & blitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBlitImage2 && "Function <vkCmdBlitImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); +# endif + + d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdResolveImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const ResolveImageInfo2 * pResolveImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResolveImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdResolveImage2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const ResolveImageInfo2 & resolveImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdResolveImage2 && "Function <vkCmdResolveImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); +# endif + + d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBeginRendering, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRendering.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const RenderingInfo * pRenderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); + d.vkCmdBeginRendering( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginRendering, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRendering.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const RenderingInfo & renderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginRendering && "Function <vkCmdBeginRendering> requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" ); +# endif + + d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndRendering, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRendering.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::endRendering( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndRendering( static_cast<VkCommandBuffer>( m_commandBuffer ) ); + } + + // wrapper function for command vkCmdSetCullMode, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCullMode.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setCullMode( CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCullMode( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) ); + } + + // wrapper function for command vkCmdSetFrontFace, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFrontFace.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetFrontFace( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) ); + } + + // wrapper function for command vkCmdSetPrimitiveTopology, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPrimitiveTopology.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( PrimitiveTopology primitiveTopology, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetPrimitiveTopology( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) ); + } + + // wrapper function for command vkCmdSetViewportWithCount, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWithCount.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + 
VULKAN_HPP_INLINE void + CommandBuffer::setViewportWithCount( uint32_t viewportCount, const Viewport * pViewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportWithCount( static_cast<VkCommandBuffer>( m_commandBuffer ), viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetViewportWithCount, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWithCount.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( ArrayProxy<const Viewport> const & viewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetViewportWithCount && + "Function <vkCmdSetViewportWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" ); +# endif + + d.vkCmdSetViewportWithCount( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetScissorWithCount, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissorWithCount.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( uint32_t scissorCount, const Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetScissorWithCount( static_cast<VkCommandBuffer>( m_commandBuffer ), scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetScissorWithCount, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissorWithCount.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( ArrayProxy<const Rect2D> const & scissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetScissorWithCount && + "Function <vkCmdSetScissorWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" ); +# endif + + d.vkCmdSetScissorWithCount( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindVertexBuffers2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, + uint32_t bindingCount, + const Buffer * pBuffers, + const DeviceSize * pOffsets, + const DeviceSize * pSizes, + const DeviceSize * pStrides, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
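+      // pSizes and pStrides may be null; per the vkCmdBindVertexBuffers2 specification, null sizes mean + // the bindings extend to the end of each buffer, and null strides take the stride from the bound pipeline. + // A hedged usage sketch of the ArrayProxy overload below (assuming a recording command buffer `cmd` and a + // vertex Buffer `vbuf`): + // cmd.bindVertexBuffers2( 0, { vbuf }, { DeviceSize( 0 ) }, {}, {} );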
d.vkCmdBindVertexBuffers2( static_cast<VkCommandBuffer>( m_commandBuffer ), + firstBinding, + bindingCount, + reinterpret_cast<const VkBuffer *>( pBuffers ), + reinterpret_cast<const VkDeviceSize *>( pOffsets ), + reinterpret_cast<const VkDeviceSize *>( pSizes ), + reinterpret_cast<const VkDeviceSize *>( pStrides ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindVertexBuffers2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, + ArrayProxy<const Buffer> const & buffers, + ArrayProxy<const DeviceSize> const & offsets, + ArrayProxy<const DeviceSize> const & sizes, + ArrayProxy<const DeviceSize> const & strides, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindVertexBuffers2 && + "Function <vkCmdBindVertexBuffers2> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); + VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" ); + } + if ( !strides.empty() && buffers.size() != strides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindVertexBuffers2( m_commandBuffer, + firstBinding, + buffers.size(), + reinterpret_cast<const VkBuffer *>( buffers.data() ), + reinterpret_cast<const VkDeviceSize *>( offsets.data() ), + reinterpret_cast<const VkDeviceSize *>( sizes.data() ), + reinterpret_cast<const VkDeviceSize *>( strides.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetDepthTestEnable, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthTestEnable.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) ); + } + + // wrapper function for command vkCmdSetDepthWriteEnable, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthWriteEnable.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
d.vkCmdSetDepthWriteEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) ); + } + + // wrapper function for command vkCmdSetDepthCompareOp, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthCompareOp.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthCompareOp( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) ); + } + + // wrapper function for command vkCmdSetDepthBoundsTestEnable, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBoundsTestEnable.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( Bool32 depthBoundsTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBoundsTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) ); + } + + // wrapper function for command vkCmdSetStencilTestEnable, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilTestEnable.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) ); + } + + // wrapper function for command vkCmdSetStencilOp, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilOp.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setStencilOp( + StencilFaceFlags faceMask, StencilOp failOp, StencilOp passOp, StencilOp depthFailOp, CompareOp compareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilOp( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkStencilFaceFlags>( faceMask ), + static_cast<VkStencilOp>( failOp ), + static_cast<VkStencilOp>( passOp ), + static_cast<VkStencilOp>( depthFailOp ), + static_cast<VkCompareOp>( compareOp ) ); + } + + // wrapper function for command vkCmdSetRasterizerDiscardEnable, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRasterizerDiscardEnable.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( Bool32 rasterizerDiscardEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRasterizerDiscardEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) ); + } + + // wrapper function for command vkCmdSetDepthBiasEnable, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBiasEnable.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBiasEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) ); + } + + // wrapper function for command vkCmdSetPrimitiveRestartEnable, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPrimitiveRestartEnable.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( Bool32 primitiveRestartEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetPrimitiveRestartEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) ); + } + + //=== VK_VERSION_1_4 === + + // wrapper function for command vkMapMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory2( const MemoryMapInfo * pMemoryMapInfo, + void ** ppData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkMapMemory2( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryMapInfo *>( pMemoryMapInfo ), ppData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkMapMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type Device::mapMemory2( const MemoryMapInfo & memoryMapInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkMapMemory2 && "Function <vkMapMemory2> requires <VK_KHR_map_memory2> or <VK_VERSION_1_4>" ); +# endif + + void * pData; + Result result = static_cast<Result>( d.vkMapMemory2( m_device, reinterpret_cast<const VkMemoryMapInfo *>( &memoryMapInfo ), &pData ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2" ); + + return detail::createResultValueType( result, std::move( pData ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUnmapMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnmapMemory2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::unmapMemory2( const MemoryUnmapInfo * pMemoryUnmapInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkUnmapMemory2( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryUnmapInfo *>( pMemoryUnmapInfo ) ) ); + } + +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUnmapMemory2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnmapMemory2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::unmapMemory2( const MemoryUnmapInfo & memoryUnmapInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUnmapMemory2 && "Function <vkUnmapMemory2> requires <VK_KHR_map_memory2> or <VK_VERSION_1_4>" ); +# endif + + Result result = static_cast<Result>( d.vkUnmapMemory2( m_device, reinterpret_cast<const VkMemoryUnmapInfo *>( &memoryUnmapInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceImageSubresourceLayout, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayout.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::getImageSubresourceLayout( const DeviceImageSubresourceInfo * pInfo, SubresourceLayout2 * pLayout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceImageSubresourceLayout( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceImageSubresourceInfo *>( pInfo ), reinterpret_cast<VkSubresourceLayout2 *>( pLayout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceImageSubresourceLayout, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayout.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE SubresourceLayout2 Device::getImageSubresourceLayout( const DeviceImageSubresourceInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayout && + "Function <vkGetDeviceImageSubresourceLayout> requires <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); +# endif + + SubresourceLayout2 layout; + d.vkGetDeviceImageSubresourceLayout( + m_device, reinterpret_cast<const VkDeviceImageSubresourceInfo *>( &info ), reinterpret_cast<VkSubresourceLayout2 *>( &layout ) ); + + return layout; + } + + // wrapper function for command vkGetDeviceImageSubresourceLayout, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayout.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageSubresourceLayout( const DeviceImageSubresourceInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayout && + "Function <vkGetDeviceImageSubresourceLayout> requires <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + SubresourceLayout2 & layout = structureChain.template get<SubresourceLayout2>(); + d.vkGetDeviceImageSubresourceLayout( + m_device, reinterpret_cast<const VkDeviceImageSubresourceInfo *>( &info ), reinterpret_cast<VkSubresourceLayout2 *>( &layout ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageSubresourceLayout2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2( Image image, + const ImageSubresource2 * pSubresource, + SubresourceLayout2 * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSubresourceLayout2( static_cast<VkDevice>( m_device ), + static_cast<VkImage>( image ), + reinterpret_cast<const VkImageSubresource2 *>( pSubresource ), + reinterpret_cast<VkSubresourceLayout2 *>( pLayout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageSubresourceLayout2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE SubresourceLayout2 Device::getImageSubresourceLayout2( Image image, + const ImageSubresource2 & subresource, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkGetImageSubresourceLayout2 && + "Function <vkGetImageSubresourceLayout2> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); +# endif + + SubresourceLayout2 layout; + d.vkGetImageSubresourceLayout2( m_device, + static_cast<VkImage>( image ), + reinterpret_cast<const VkImageSubresource2 *>( &subresource ), + reinterpret_cast<VkSubresourceLayout2 *>( &layout ) ); + + return layout; + } + + // wrapper function for command vkGetImageSubresourceLayout2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> + Device::getImageSubresourceLayout2( Image image, const ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkGetImageSubresourceLayout2 && + "Function <vkGetImageSubresourceLayout2> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + SubresourceLayout2 & layout = structureChain.template get<SubresourceLayout2>(); + d.vkGetImageSubresourceLayout2( m_device, + static_cast<VkImage>( image ), + reinterpret_cast<const VkImageSubresource2 *>( &subresource ), + reinterpret_cast<VkSubresourceLayout2 *>( &layout ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyMemoryToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToImage( const CopyMemoryToImageInfo * pCopyMemoryToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkCopyMemoryToImage( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyMemoryToImageInfo *>( pCopyMemoryToImageInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyMemoryToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::copyMemoryToImage( const CopyMemoryToImageInfo & copyMemoryToImageInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyMemoryToImage && "Function <vkCopyMemoryToImage> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" ); +# endif + + Result result = static_cast<Result>( d.vkCopyMemoryToImage( m_device, reinterpret_cast<const VkCopyMemoryToImageInfo *>( &copyMemoryToImageInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImage" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyImageToMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToMemory.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToMemory( const CopyImageToMemoryInfo * pCopyImageToMemoryInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkCopyImageToMemory( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyImageToMemoryInfo *>( pCopyImageToMemoryInfo ) ) ); + } + +#ifndef
VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyImageToMemory, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToMemory.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::copyImageToMemory( const CopyImageToMemoryInfo & copyImageToMemoryInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyImageToMemory && "Function <vkCopyImageToMemory> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" ); +# endif + + Result result = static_cast<Result>( d.vkCopyImageToMemory( m_device, reinterpret_cast<const VkCopyImageToMemoryInfo *>( &copyImageToMemoryInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemory" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyImageToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToImage( const CopyImageToImageInfo * pCopyImageToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkCopyImageToImage( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyImageToImageInfo *>( pCopyImageToImageInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyImageToImage, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToImage.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::copyImageToImage( const CopyImageToImageInfo & copyImageToImageInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyImageToImage && "Function <vkCopyImageToImage> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" ); +# endif + + Result result = static_cast<Result>( d.vkCopyImageToImage( m_device, reinterpret_cast<const VkCopyImageToImageInfo *>( &copyImageToImageInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImage" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkTransitionImageLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTransitionImageLayout.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::transitionImageLayout( uint32_t transitionCount, + const HostImageLayoutTransitionInfo * pTransitions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkTransitionImageLayout( + static_cast<VkDevice>( m_device ), transitionCount,
reinterpret_cast<const VkHostImageLayoutTransitionInfo *>( pTransitions ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkTransitionImageLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTransitionImageLayout.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::transitionImageLayout( ArrayProxy<const HostImageLayoutTransitionInfo> const & transitions, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkTransitionImageLayout && "Function <vkTransitionImageLayout> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" ); +# endif + + Result result = static_cast<Result>( + d.vkTransitionImageLayout( m_device, transitions.size(), reinterpret_cast<const VkHostImageLayoutTransitionInfo *>( transitions.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayout" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushDescriptorSet, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet( PipelineBindPoint pipelineBindPoint, + PipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const WriteDescriptorSet * pDescriptorWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSet( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkPipelineBindPoint>( pipelineBindPoint ), + static_cast<VkPipelineLayout>( layout ), + set, + descriptorWriteCount, + reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDescriptorSet, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet( PipelineBindPoint pipelineBindPoint, + PipelineLayout layout, + uint32_t set, + ArrayProxy<const WriteDescriptorSet> const & descriptorWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSet && "Function <vkCmdPushDescriptorSet> requires <VK_KHR_push_descriptor> or <VK_VERSION_1_4>" ); +# endif + + d.vkCmdPushDescriptorSet( m_commandBuffer, + static_cast<VkPipelineBindPoint>( pipelineBindPoint ), + static_cast<VkPipelineLayout>( layout ), + set, + descriptorWrites.size(), + reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushDescriptorSetWithTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate.html + template <typename Dispatch, typename 
std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate( + DescriptorUpdateTemplate descriptorUpdateTemplate, PipelineLayout layout, uint32_t set, const void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSetWithTemplate( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), + static_cast<VkPipelineLayout>( layout ), + set, + pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDescriptorSetWithTemplate, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate.html + template <typename DataType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate( DescriptorUpdateTemplate descriptorUpdateTemplate, + PipelineLayout layout, + uint32_t set, + DataType const & data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkCmdPushDescriptorSetWithTemplate && + "Function <vkCmdPushDescriptorSetWithTemplate> requires <VK_KHR_descriptor_update_template> or <VK_KHR_push_descriptor> or <VK_VERSION_1_4>" ); +# endif + + d.vkCmdPushDescriptorSetWithTemplate( m_commandBuffer, + static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), + static_cast<VkPipelineLayout>( layout ), + set, + reinterpret_cast<const void *>( &data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindDescriptorSets2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2( const BindDescriptorSetsInfo * pBindDescriptorSetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindDescriptorSets2( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkBindDescriptorSetsInfo *>( pBindDescriptorSetsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindDescriptorSets2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2( const BindDescriptorSetsInfo & bindDescriptorSetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorSets2 && "Function <vkCmdBindDescriptorSets2> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" ); +# endif + + d.vkCmdBindDescriptorSets2( m_commandBuffer, reinterpret_cast<const VkBindDescriptorSetsInfo *>( &bindDescriptorSetsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushConstants2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants2.html + template 
<typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pushConstants2( const PushConstantsInfo * pPushConstantsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushConstants2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkPushConstantsInfo *>( pPushConstantsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushConstants2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pushConstants2( const PushConstantsInfo & pushConstantsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushConstants2 && "Function <vkCmdPushConstants2> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" ); +# endif + + d.vkCmdPushConstants2( m_commandBuffer, reinterpret_cast<const VkPushConstantsInfo *>( &pushConstantsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushDescriptorSet2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2( const PushDescriptorSetInfo * pPushDescriptorSetInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSet2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkPushDescriptorSetInfo *>( pPushDescriptorSetInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDescriptorSet2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2( const PushDescriptorSetInfo & pushDescriptorSetInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSet2 && "Function <vkCmdPushDescriptorSet2> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" ); +# endif + + d.vkCmdPushDescriptorSet2( m_commandBuffer, reinterpret_cast<const VkPushDescriptorSetInfo *>( &pushDescriptorSetInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushDescriptorSetWithTemplate2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate2( const PushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSetWithTemplate2( static_cast<VkCommandBuffer>( 
m_commandBuffer ), + reinterpret_cast<const VkPushDescriptorSetWithTemplateInfo *>( pPushDescriptorSetWithTemplateInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDescriptorSetWithTemplate2, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate2( const PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetWithTemplate2 && + "Function <vkCmdPushDescriptorSetWithTemplate2> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" ); +# endif + + d.vkCmdPushDescriptorSetWithTemplate2( m_commandBuffer, + reinterpret_cast<const VkPushDescriptorSetWithTemplateInfo *>( &pushDescriptorSetWithTemplateInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetLineStipple, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineStipple.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setLineStipple( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLineStipple( static_cast<VkCommandBuffer>( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); + } + + // wrapper function for command vkCmdBindIndexBuffer2, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindIndexBuffer2.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::bindIndexBuffer2( Buffer buffer, DeviceSize offset, DeviceSize size, IndexType indexType, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindIndexBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkBuffer>( buffer ), + static_cast<VkDeviceSize>( offset ), + static_cast<VkDeviceSize>( size ), + static_cast<VkIndexType>( indexType ) ); + } + + // wrapper function for command vkGetRenderingAreaGranularity, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderingAreaGranularity.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::getRenderingAreaGranularity( const RenderingAreaInfo * pRenderingAreaInfo, Extent2D * pGranularity, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetRenderingAreaGranularity( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkRenderingAreaInfo *>( pRenderingAreaInfo ), reinterpret_cast<VkExtent2D *>( pGranularity ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRenderingAreaGranularity, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderingAreaGranularity.html + template <typename Dispatch, typename 
std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Extent2D Device::getRenderingAreaGranularity( const RenderingAreaInfo & renderingAreaInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRenderingAreaGranularity && "Function <vkGetRenderingAreaGranularity> requires <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); +# endif + + Extent2D granularity; + d.vkGetRenderingAreaGranularity( + m_device, reinterpret_cast<const VkRenderingAreaInfo *>( &renderingAreaInfo ), reinterpret_cast<VkExtent2D *>( &granularity ) ); + + return granularity; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetRenderingAttachmentLocations, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingAttachmentLocations.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocations( const RenderingAttachmentLocationInfo * pLocationInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRenderingAttachmentLocations( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkRenderingAttachmentLocationInfo *>( pLocationInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetRenderingAttachmentLocations, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingAttachmentLocations.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocations( const RenderingAttachmentLocationInfo & locationInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetRenderingAttachmentLocations && + "Function <vkCmdSetRenderingAttachmentLocations> requires <VK_KHR_dynamic_rendering_local_read> or <VK_VERSION_1_4>" ); +# endif + + d.vkCmdSetRenderingAttachmentLocations( m_commandBuffer, reinterpret_cast<const VkRenderingAttachmentLocationInfo *>( &locationInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetRenderingInputAttachmentIndices, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingInputAttachmentIndices.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setRenderingInputAttachmentIndices( const RenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRenderingInputAttachmentIndices( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkRenderingInputAttachmentIndexInfo *>( pInputAttachmentIndexInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetRenderingInputAttachmentIndices, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingInputAttachmentIndices.html + template <typename 
Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setRenderingInputAttachmentIndices( const RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetRenderingInputAttachmentIndices && + "Function <vkCmdSetRenderingInputAttachmentIndices> requires <VK_KHR_dynamic_rendering_local_read> or <VK_VERSION_1_4>" ); +# endif + + d.vkCmdSetRenderingInputAttachmentIndices( m_commandBuffer, reinterpret_cast<const VkRenderingInputAttachmentIndexInfo *>( &inputAttachmentIndexInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_surface === + + // wrapper function for command vkDestroySurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySurfaceKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( SurfaceKHR surface, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySurfaceKHR( + static_cast<VkInstance>( m_instance ), static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySurfaceKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Instance::destroySurfaceKHR( SurfaceKHR surface, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySurfaceKHR && "Function <vkDestroySurfaceKHR> requires <VK_KHR_surface>" ); +# endif + + d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySurfaceKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Instance::destroy( SurfaceKHR surface, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySurfaceKHR( + static_cast<VkInstance>( m_instance ), static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySurfaceKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Instance::destroy( SurfaceKHR surface, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySurfaceKHR && "Function <vkDestroySurfaceKHR> requires <VK_KHR_surface>" ); +# endif + + d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSurfaceSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceSupportKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Bool32 * pSupported, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( + static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( pSupported ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfaceSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceSupportKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<Bool32>::type + PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceSupportKHR && "Function <vkGetPhysicalDeviceSurfaceSupportKHR> requires <VK_KHR_surface>" ); +# endif + + Bool32 supported; + Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( + m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( &supported ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); + + return detail::createResultValueType( result, std::move( supported ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilitiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( SurfaceKHR surface, + SurfaceCapabilitiesKHR * pSurfaceCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + static_cast<VkSurfaceKHR>( surface ), + reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilitiesKHR.html + template 
<typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<SurfaceCapabilitiesKHR>::type + PhysicalDevice::getSurfaceCapabilitiesKHR( SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR && "Function <vkGetPhysicalDeviceSurfaceCapabilitiesKHR> requires <VK_KHR_surface>" ); +# endif + + SurfaceCapabilitiesKHR surfaceCapabilities; + Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); + + return detail::createResultValueType( result, std::move( surfaceCapabilities ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSurfaceFormatsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormatsKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( SurfaceKHR surface, + uint32_t * pSurfaceFormatCount, + SurfaceFormatKHR * pSurfaceFormats, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + static_cast<VkSurfaceKHR>( surface ), + pSurfaceFormatCount, + reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfaceFormatsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormatsKHR.html + template <typename SurfaceFormatKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename SurfaceFormatKHRAllocator::value_type, SurfaceFormatKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type + PhysicalDevice::getSurfaceFormatsKHR( SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormatsKHR && "Function <vkGetPhysicalDeviceSurfaceFormatsKHR> requires <VK_KHR_surface>" ); +# endif + + std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats; + uint32_t surfaceFormatCount; + Result result; + do + { + result = + static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( + m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) ); + } + 
} while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + return detail::createResultValueType( result, std::move( surfaceFormats ) ); + } + + // wrapper function for command vkGetPhysicalDeviceSurfaceFormatsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormatsKHR.html + template <typename SurfaceFormatKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename SurfaceFormatKHRAllocator::value_type, SurfaceFormatKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type + PhysicalDevice::getSurfaceFormatsKHR( SurfaceKHR surface, SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormatsKHR && "Function <vkGetPhysicalDeviceSurfaceFormatsKHR> requires <VK_KHR_surface>" ); +# endif + + std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats( surfaceFormatKHRAllocator ); + uint32_t surfaceFormatCount; + Result result; + do + { + result = + static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( + m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + return detail::createResultValueType( result, std::move( surfaceFormats ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSurfacePresentModesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfacePresentModesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( SurfaceKHR surface, + uint32_t * pPresentModeCount, + PresentModeKHR * pPresentModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + static_cast<VkSurfaceKHR>( surface ), + pPresentModeCount, + reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfacePresentModesKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfacePresentModesKHR.html + template <typename PresentModeKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PresentModeKHRAllocator::value_type, PresentModeKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type + PhysicalDevice::getSurfacePresentModesKHR( SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModesKHR && "Function <vkGetPhysicalDeviceSurfacePresentModesKHR> requires <VK_KHR_surface>" ); +# endif + + std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes; + uint32_t presentModeCount; + Result result; + do + { + result = static_cast<Result>( + d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( + m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) + { + presentModes.resize( presentModeCount ); + } + return detail::createResultValueType( result, std::move( presentModes ) ); + } + + // wrapper function for command vkGetPhysicalDeviceSurfacePresentModesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfacePresentModesKHR.html + template <typename PresentModeKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PresentModeKHRAllocator::value_type, PresentModeKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type + PhysicalDevice::getSurfacePresentModesKHR( SurfaceKHR surface, PresentModeKHRAllocator & presentModeKHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModesKHR && "Function <vkGetPhysicalDeviceSurfacePresentModesKHR> requires <VK_KHR_surface>" ); +# endif + + std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator ); + uint32_t presentModeCount; + Result result; + do + { + result = static_cast<Result>( + d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( + m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) 
); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) + { + presentModes.resize( presentModeCount ); + } + return detail::createResultValueType( result, std::move( presentModes ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_swapchain === + + // wrapper function for command vkCreateSwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSwapchainKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const SwapchainCreateInfoKHR * pCreateInfo, + const AllocationCallbacks * pAllocator, + SwapchainKHR * pSwapchain, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateSwapchainKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkSwapchainKHR *>( pSwapchain ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateSwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSwapchainKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<SwapchainKHR>::type + Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSwapchainKHR && "Function <vkCreateSwapchainKHR> requires <VK_KHR_swapchain>" ); +# endif + + SwapchainKHR swapchain; + Result result = static_cast<Result>( d.vkCreateSwapchainKHR( m_device, + reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" ); + + return detail::createResultValueType( result, std::move( swapchain ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateSwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSwapchainKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type + Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSwapchainKHR && "Function <vkCreateSwapchainKHR> requires <VK_KHR_swapchain>" ); +# endif + + SwapchainKHR swapchain; + Result result = static_cast<Result>( d.vkCreateSwapchainKHR( m_device, + reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), + 
reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<SwapchainKHR, Dispatch>( swapchain, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySwapchainKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroySwapchainKHR( SwapchainKHR swapchain, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySwapchainKHR( + static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySwapchainKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroySwapchainKHR( SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySwapchainKHR && "Function <vkDestroySwapchainKHR> requires <VK_KHR_swapchain>" ); +# endif + + d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySwapchainKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( SwapchainKHR swapchain, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySwapchainKHR( + static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySwapchainKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySwapchainKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySwapchainKHR && "Function <vkDestroySwapchainKHR> requires <VK_KHR_swapchain>" ); +# endif + + d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const 
VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetSwapchainImagesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainImagesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( SwapchainKHR swapchain, + uint32_t * pSwapchainImageCount, + Image * pSwapchainImages, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetSwapchainImagesKHR( + static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage *>( pSwapchainImages ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSwapchainImagesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainImagesKHR.html + template <typename ImageAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename ImageAllocator::value_type, Image>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image, ImageAllocator>>::type + Device::getSwapchainImagesKHR( SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSwapchainImagesKHR && "Function <vkGetSwapchainImagesKHR> requires <VK_KHR_swapchain>" ); +# endif + + std::vector<Image, ImageAllocator> swapchainImages; + uint32_t swapchainImageCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && swapchainImageCount ) + { + swapchainImages.resize( swapchainImageCount ); + result = static_cast<Result>( d.vkGetSwapchainImagesKHR( + m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); + VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); + if ( swapchainImageCount < swapchainImages.size() ) + { + swapchainImages.resize( swapchainImageCount ); + } + return detail::createResultValueType( result, std::move( swapchainImages ) ); + } + + // wrapper function for command vkGetSwapchainImagesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainImagesKHR.html + template <typename ImageAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename ImageAllocator::value_type, Image>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image, ImageAllocator>>::type + Device::getSwapchainImagesKHR( SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSwapchainImagesKHR && "Function <vkGetSwapchainImagesKHR> 
requires <VK_KHR_swapchain>" ); +# endif + + std::vector<Image, ImageAllocator> swapchainImages( imageAllocator ); + uint32_t swapchainImageCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && swapchainImageCount ) + { + swapchainImages.resize( swapchainImageCount ); + result = static_cast<Result>( d.vkGetSwapchainImagesKHR( + m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); + VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); + if ( swapchainImageCount < swapchainImages.size() ) + { + swapchainImages.resize( swapchainImageCount ); + } + return detail::createResultValueType( result, std::move( swapchainImages ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkAcquireNextImageKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireNextImageKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( + SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, uint32_t * pImageIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkAcquireNextImageKHR( static_cast<VkDevice>( m_device ), + static_cast<VkSwapchainKHR>( swapchain ), + timeout, + static_cast<VkSemaphore>( semaphore ), + static_cast<VkFence>( fence ), + pImageIndex ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquireNextImageKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireNextImageKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> + Device::acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquireNextImageKHR && "Function <vkAcquireNextImageKHR> requires <VK_KHR_swapchain>" ); +# endif + + uint32_t imageIndex; + Result result = static_cast<Result>( d.vkAcquireNextImageKHR( + m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex ) ); + +# if defined( VULKAN_HPP_HANDLE_ERROR_OUT_OF_DATE_AS_SUCCESS ) + detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR", + { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR, Result::eErrorOutOfDateKHR } ); +# else + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } ); +# endif + + return ResultValue<uint32_t>( result, std::move( imageIndex ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkQueuePresentKHR, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueuePresentKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR * pPresentInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkQueuePresentKHR( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueuePresentKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueuePresentKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueuePresentKHR && "Function <vkQueuePresentKHR> requires <VK_KHR_swapchain>" ); +# endif + + Result result = static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) ) ); + +# if defined( VULKAN_HPP_HANDLE_ERROR_OUT_OF_DATE_AS_SUCCESS ) + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", { Result::eSuccess, Result::eSuboptimalKHR, Result::eErrorOutOfDateKHR } ); +# else + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", { Result::eSuccess, Result::eSuboptimalKHR } ); +# endif + + return static_cast<Result>( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceGroupPresentCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPresentCapabilitiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( + static_cast<VkDevice>( m_device ), reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceGroupPresentCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPresentCapabilitiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DeviceGroupPresentCapabilitiesKHR>::type + Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceGroupPresentCapabilitiesKHR && + "Function <vkGetDeviceGroupPresentCapabilitiesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" ); +# endif + + DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities; + Result result = static_cast<Result>( + 
d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" ); + + return detail::createResultValueType( result, std::move( deviceGroupPresentCapabilities ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceGroupSurfacePresentModesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupSurfacePresentModesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( SurfaceKHR surface, + DeviceGroupPresentModeFlagsKHR * pModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( + static_cast<VkDevice>( m_device ), static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceGroupSurfacePresentModesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupSurfacePresentModesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DeviceGroupPresentModeFlagsKHR>::type + Device::getGroupSurfacePresentModesKHR( SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceGroupSurfacePresentModesKHR && + "Function <vkGetDeviceGroupSurfacePresentModesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" ); +# endif + + DeviceGroupPresentModeFlagsKHR modes; + Result result = static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( + m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); + + return detail::createResultValueType( result, std::move( modes ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDevicePresentRectanglesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDevicePresentRectanglesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getPresentRectanglesKHR( SurfaceKHR surface, uint32_t * pRectCount, Rect2D * pRects, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( + static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDevicePresentRectanglesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDevicePresentRectanglesKHR.html + template <typename 
Rect2DAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename Rect2DAllocator::value_type, Rect2D>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type + PhysicalDevice::getPresentRectanglesKHR( SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDevicePresentRectanglesKHR && + "Function <vkGetPhysicalDevicePresentRectanglesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" ); +# endif + + std::vector<Rect2D, Rect2DAllocator> rects; + uint32_t rectCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && rectCount ) + { + rects.resize( rectCount ); + result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( + m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); + VULKAN_HPP_ASSERT( rectCount <= rects.size() ); + if ( rectCount < rects.size() ) + { + rects.resize( rectCount ); + } + return detail::createResultValueType( result, std::move( rects ) ); + } + + // wrapper function for command vkGetPhysicalDevicePresentRectanglesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDevicePresentRectanglesKHR.html + template <typename Rect2DAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename Rect2DAllocator::value_type, Rect2D>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type + PhysicalDevice::getPresentRectanglesKHR( SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDevicePresentRectanglesKHR && + "Function <vkGetPhysicalDevicePresentRectanglesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" ); +# endif + + std::vector<Rect2D, Rect2DAllocator> rects( rect2DAllocator ); + uint32_t rectCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && rectCount ) + { + rects.resize( rectCount ); + result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( + m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); + VULKAN_HPP_ASSERT( rectCount <= rects.size() ); + if ( rectCount < rects.size() ) + { + rects.resize( rectCount ); + } + return detail::createResultValueType( result, std::move( rects ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // 
wrapper function for command vkAcquireNextImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireNextImage2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR * pAcquireInfo, + uint32_t * pImageIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkAcquireNextImage2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquireNextImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireNextImage2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquireNextImage2KHR && "Function <vkAcquireNextImage2KHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" ); +# endif + + uint32_t imageIndex; + Result result = + static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex ) ); + +# if defined( VULKAN_HPP_HANDLE_ERROR_OUT_OF_DATE_AS_SUCCESS ) + detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", + { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR, Result::eErrorOutOfDateKHR } ); +# else + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } ); +# endif + + return ResultValue<uint32_t>( result, std::move( imageIndex ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_display === + + // wrapper function for command vkGetPhysicalDeviceDisplayPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPropertiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount, + DisplayPropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceDisplayPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPropertiesKHR.html + template <typename DisplayPropertiesKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DisplayPropertiesKHRAllocator::value_type, DisplayPropertiesKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, 
bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type + PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPropertiesKHR && "Function <vkGetPhysicalDeviceDisplayPropertiesKHR> requires <VK_KHR_display>" ); +# endif + + std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( + d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceDisplayPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPropertiesKHR.html + template <typename DisplayPropertiesKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DisplayPropertiesKHRAllocator::value_type, DisplayPropertiesKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type + PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPropertiesKHR && "Function <vkGetPhysicalDeviceDisplayPropertiesKHR> requires <VK_KHR_display>" ); +# endif + + std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties( displayPropertiesKHRAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( + d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceDisplayPlanePropertiesKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPlanePropertiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount, + DisplayPlanePropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceDisplayPlanePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPlanePropertiesKHR.html + template <typename DisplayPlanePropertiesKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DisplayPlanePropertiesKHRAllocator::value_type, DisplayPlanePropertiesKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type + PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR && "Function <vkGetPhysicalDeviceDisplayPlanePropertiesKHR> requires <VK_KHR_display>" ); +# endif + + std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceDisplayPlanePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPlanePropertiesKHR.html + template <typename DisplayPlanePropertiesKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DisplayPlanePropertiesKHRAllocator::value_type, DisplayPlanePropertiesKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type + PhysicalDevice::getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR && "Function <vkGetPhysicalDeviceDisplayPlanePropertiesKHR> requires <VK_KHR_display>" ); +# endif + + std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties( displayPlanePropertiesKHRAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDisplayPlaneSupportedDisplaysKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneSupportedDisplaysKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, + uint32_t * pDisplayCount, + DisplayKHR * pDisplays, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( + static_cast<VkPhysicalDevice>( m_physicalDevice ), planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDisplayPlaneSupportedDisplaysKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneSupportedDisplaysKHR.html + template <typename DisplayKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DisplayKHRAllocator::value_type, DisplayKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type + PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneSupportedDisplaysKHR && "Function <vkGetDisplayPlaneSupportedDisplaysKHR> requires <VK_KHR_display>" ); +# endif + + std::vector<DisplayKHR, DisplayKHRAllocator> displays; + uint32_t displayCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && displayCount ) + { + displays.resize( displayCount ); + result = static_cast<Result>( + d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, 
reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); + VULKAN_HPP_ASSERT( displayCount <= displays.size() ); + if ( displayCount < displays.size() ) + { + displays.resize( displayCount ); + } + return detail::createResultValueType( result, std::move( displays ) ); + } + + // wrapper function for command vkGetDisplayPlaneSupportedDisplaysKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneSupportedDisplaysKHR.html + template <typename DisplayKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DisplayKHRAllocator::value_type, DisplayKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type + PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneSupportedDisplaysKHR && "Function <vkGetDisplayPlaneSupportedDisplaysKHR> requires <VK_KHR_display>" ); +# endif + + std::vector<DisplayKHR, DisplayKHRAllocator> displays( displayKHRAllocator ); + uint32_t displayCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && displayCount ) + { + displays.resize( displayCount ); + result = static_cast<Result>( + d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); + VULKAN_HPP_ASSERT( displayCount <= displays.size() ); + if ( displayCount < displays.size() ) + { + displays.resize( displayCount ); + } + return detail::createResultValueType( result, std::move( displays ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDisplayModePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModePropertiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( DisplayKHR display, + uint32_t * pPropertyCount, + DisplayModePropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetDisplayModePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + static_cast<VkDisplayKHR>( display ), + pPropertyCount, + reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDisplayModePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModePropertiesKHR.html + template <typename DisplayModePropertiesKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename 
DisplayModePropertiesKHRAllocator::value_type, DisplayModePropertiesKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type + PhysicalDevice::getDisplayModePropertiesKHR( DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayModePropertiesKHR && "Function <vkGetDisplayModePropertiesKHR> requires <VK_KHR_display>" ); +# endif + + std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( + m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetDisplayModePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModePropertiesKHR.html + template <typename DisplayModePropertiesKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DisplayModePropertiesKHRAllocator::value_type, DisplayModePropertiesKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type + PhysicalDevice::getDisplayModePropertiesKHR( DisplayKHR display, + DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayModePropertiesKHR && "Function <vkGetDisplayModePropertiesKHR> requires <VK_KHR_display>" ); +# endif + + std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties( displayModePropertiesKHRAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( + m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= 
properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateDisplayModeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDisplayModeKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( DisplayKHR display, + const DisplayModeCreateInfoKHR * pCreateInfo, + const AllocationCallbacks * pAllocator, + DisplayModeKHR * pMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateDisplayModeKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + static_cast<VkDisplayKHR>( display ), + reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkDisplayModeKHR *>( pMode ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDisplayModeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDisplayModeKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DisplayModeKHR>::type PhysicalDevice::createDisplayModeKHR( + DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDisplayModeKHR && "Function <vkCreateDisplayModeKHR> requires <VK_KHR_display>" ); +# endif + + DisplayModeKHR mode; + Result result = static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, + static_cast<VkDisplayKHR>( display ), + reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" ); + + return detail::createResultValueType( result, std::move( mode ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDisplayModeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDisplayModeKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DisplayModeKHR, Dispatch>>::type PhysicalDevice::createDisplayModeKHRUnique( + DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDisplayModeKHR && "Function <vkCreateDisplayModeKHR> requires <VK_KHR_display>" ); +# endif + + DisplayModeKHR mode; + Result result = static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, + static_cast<VkDisplayKHR>( display ), + reinterpret_cast<const VkDisplayModeCreateInfoKHR 
*>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<DisplayModeKHR, Dispatch>( mode, detail::ObjectDestroy<PhysicalDevice, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDisplayPlaneCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneCapabilitiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, + uint32_t planeIndex, + DisplayPlaneCapabilitiesKHR * pCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + static_cast<VkDisplayModeKHR>( mode ), + planeIndex, + reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDisplayPlaneCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneCapabilitiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DisplayPlaneCapabilitiesKHR>::type + PhysicalDevice::getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneCapabilitiesKHR && "Function <vkGetDisplayPlaneCapabilitiesKHR> requires <VK_KHR_display>" ); +# endif + + DisplayPlaneCapabilitiesKHR capabilities; + Result result = static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( + m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" ); + + return detail::createResultValueType( result, std::move( capabilities ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateDisplayPlaneSurfaceKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDisplayPlaneSurfaceKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR * pCreateInfo, + const AllocationCallbacks * pAllocator, + SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( static_cast<VkInstance>( m_instance ), + reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + 
reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDisplayPlaneSurfaceKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDisplayPlaneSurfaceKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<SurfaceKHR>::type Instance::createDisplayPlaneSurfaceKHR( + const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDisplayPlaneSurfaceKHR && "Function <vkCreateDisplayPlaneSurfaceKHR> requires <VK_KHR_display>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, + reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" ); + + return detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDisplayPlaneSurfaceKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDisplayPlaneSurfaceKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR, Dispatch>>::type Instance::createDisplayPlaneSurfaceKHRUnique( + const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDisplayPlaneSurfaceKHR && "Function <vkCreateDisplayPlaneSurfaceKHR> requires <VK_KHR_display>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, + reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_display_swapchain === + + // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, + const SwapchainCreateInfoKHR * pCreateInfos, + const AllocationCallbacks * pAllocator, + SwapchainKHR * pSwapchains, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( static_cast<VkDevice>( m_device ), + swapchainCount, + reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfos ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkSwapchainKHR *>( pSwapchains ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + template <typename SwapchainKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename SwapchainKHRAllocator::value_type, SwapchainKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type Device::createSharedSwapchainsKHR( + ArrayProxy<const SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" ); +# endif + + std::vector<SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size() ); + Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, + createInfos.size(), + reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); + + return detail::createResultValueType( result, std::move( swapchains ) ); + } + + // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + template <typename SwapchainKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename SwapchainKHRAllocator::value_type, SwapchainKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type + Device::createSharedSwapchainsKHR( ArrayProxy<const SwapchainCreateInfoKHR> const & createInfos, + Optional<const AllocationCallbacks> allocator, + SwapchainKHRAllocator & swapchainKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" ); +# endif + + std::vector<SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size(), swapchainKHRAllocator ); + Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, + createInfos.size(), + reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); + + return detail::createResultValueType( result, std::move( 
swapchains ) ); + } + + // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<SwapchainKHR>::type + Device::createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" ); +# endif + + SwapchainKHR swapchain; + Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, + 1, + reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" ); + + return detail::createResultValueType( result, std::move( swapchain ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + template <typename Dispatch, + typename SwapchainKHRAllocator, + typename std::enable_if<std::is_same<typename SwapchainKHRAllocator::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type + Device::createSharedSwapchainsKHRUnique( ArrayProxy<const SwapchainCreateInfoKHR> const & createInfos, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" ); +# endif + + std::vector<SwapchainKHR> swapchains( createInfos.size() ); + Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, + createInfos.size(), + reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); + std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains; + uniqueSwapchains.reserve( createInfos.size() ); + detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); + for ( auto const & swapchain : swapchains ) + { + uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) ); + } + return detail::createResultValueType( result, std::move( uniqueSwapchains ) ); + } + + // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + template <typename Dispatch, + typename SwapchainKHRAllocator, + typename 
std::enable_if<std::is_same<typename SwapchainKHRAllocator::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type + Device::createSharedSwapchainsKHRUnique( ArrayProxy<const SwapchainCreateInfoKHR> const & createInfos, + Optional<const AllocationCallbacks> allocator, + SwapchainKHRAllocator & swapchainKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" ); +# endif + + std::vector<SwapchainKHR> swapchains( createInfos.size() ); + Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, + createInfos.size(), + reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); + std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator ); + uniqueSwapchains.reserve( createInfos.size() ); + detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); + for ( auto const & swapchain : swapchains ) + { + uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) ); + } + return detail::createResultValueType( result, std::move( uniqueSwapchains ) ); + } + + // wrapper function for command vkCreateSharedSwapchainsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSharedSwapchainsKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type + Device::createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function <vkCreateSharedSwapchainsKHR> requires <VK_KHR_display_swapchain>" ); +# endif + + SwapchainKHR swapchain; + Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, + 1, + reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<SwapchainKHR, Dispatch>( swapchain, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + // wrapper function for command vkCreateXlibSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateXlibSurfaceKHR.html + template <typename Dispatch, typename 
std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR * pCreateInfo, + const AllocationCallbacks * pAllocator, + SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateXlibSurfaceKHR( static_cast<VkInstance>( m_instance ), + reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateXlibSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateXlibSurfaceKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<SurfaceKHR>::type + Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateXlibSurfaceKHR && "Function <vkCreateXlibSurfaceKHR> requires <VK_KHR_xlib_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, + reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" ); + + return detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateXlibSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateXlibSurfaceKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR, Dispatch>>::type + Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateXlibSurfaceKHR && "Function <vkCreateXlibSurfaceKHR> requires <VK_KHR_xlib_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, + reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceXlibPresentationSupportKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceXlibPresentationSupportKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE Bool32 + PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display * dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Bool32>( + d.vkGetPhysicalDeviceXlibPresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, dpy, visualID ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceXlibPresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceXlibPresentationSupportKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE Bool32 + PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceXlibPresentationSupportKHR && + "Function <vkGetPhysicalDeviceXlibPresentationSupportKHR> requires <VK_KHR_xlib_surface>" ); +# endif + + VkBool32 result = d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID ); + + return static_cast<Bool32>( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + // wrapper function for command vkCreateXcbSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateXcbSurfaceKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR * pCreateInfo, + const AllocationCallbacks * pAllocator, + SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateXcbSurfaceKHR( static_cast<VkInstance>( m_instance ), + reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateXcbSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateXcbSurfaceKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<SurfaceKHR>::type + Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateXcbSurfaceKHR && "Function <vkCreateXcbSurfaceKHR> requires <VK_KHR_xcb_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, 
+ reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" ); + + return detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateXcbSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateXcbSurfaceKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR, Dispatch>>::type + Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateXcbSurfaceKHR && "Function <vkCreateXcbSurfaceKHR> requires <VK_KHR_xcb_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, + reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceXcbPresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceXcbPresentationSupportKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t * connection, + xcb_visualid_t visual_id, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Bool32>( + d.vkGetPhysicalDeviceXcbPresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, connection, visual_id ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceXcbPresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceXcbPresentationSupportKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t & connection, + xcb_visualid_t visual_id, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceXcbPresentationSupportKHR && + "Function <vkGetPhysicalDeviceXcbPresentationSupportKHR> requires <VK_KHR_xcb_surface>" ); +# endif + + VkBool32 result = d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, 
queueFamilyIndex, &connection, visual_id ); + + return static_cast<Bool32>( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + // wrapper function for command vkCreateWaylandSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateWaylandSurfaceKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR * pCreateInfo, + const AllocationCallbacks * pAllocator, + SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( static_cast<VkInstance>( m_instance ), + reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateWaylandSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateWaylandSurfaceKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<SurfaceKHR>::type + Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateWaylandSurfaceKHR && "Function <vkCreateWaylandSurfaceKHR> requires <VK_KHR_wayland_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, + reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" ); + + return detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateWaylandSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateWaylandSurfaceKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR, Dispatch>>::type Instance::createWaylandSurfaceKHRUnique( + const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateWaylandSurfaceKHR && "Function <vkCreateWaylandSurfaceKHR> requires <VK_KHR_wayland_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, + reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface 
) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceWaylandPresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceWaylandPresentationSupportKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, + struct wl_display * display, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Bool32>( + d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, display ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceWaylandPresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceWaylandPresentationSupportKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, + struct wl_display & display, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR && + "Function <vkGetPhysicalDeviceWaylandPresentationSupportKHR> requires <VK_KHR_wayland_surface>" ); +# endif + + VkBool32 result = d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display ); + + return static_cast<Bool32>( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + // wrapper function for command vkCreateAndroidSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAndroidSurfaceKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR * pCreateInfo, + const AllocationCallbacks * pAllocator, + SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( static_cast<VkInstance>( m_instance ), + reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateAndroidSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAndroidSurfaceKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<SurfaceKHR>::type + Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateAndroidSurfaceKHR && "Function <vkCreateAndroidSurfaceKHR> requires <VK_KHR_android_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, + reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" ); + + return detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateAndroidSurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAndroidSurfaceKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR, Dispatch>>::type Instance::createAndroidSurfaceKHRUnique( + const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateAndroidSurfaceKHR && "Function <vkCreateAndroidSurfaceKHR> requires <VK_KHR_android_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, + reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + // wrapper function for command vkCreateWin32SurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateWin32SurfaceKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR * pCreateInfo, + const AllocationCallbacks * pAllocator, + SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateWin32SurfaceKHR( static_cast<VkInstance>( m_instance ), + reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateWin32SurfaceKHR, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateWin32SurfaceKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<SurfaceKHR>::type + Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateWin32SurfaceKHR && "Function <vkCreateWin32SurfaceKHR> requires <VK_KHR_win32_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, + reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" ); + + return detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateWin32SurfaceKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateWin32SurfaceKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR, Dispatch>>::type Instance::createWin32SurfaceKHRUnique( + const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateWin32SurfaceKHR && "Function <vkCreateWin32SurfaceKHR> requires <VK_KHR_win32_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, + reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceWin32PresentationSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceWin32PresentationSupportKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Bool32>( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex ) ); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + // wrapper function for command vkCreateDebugReportCallbackEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDebugReportCallbackEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT * pCreateInfo, + const AllocationCallbacks * pAllocator, + DebugReportCallbackEXT * pCallback, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( static_cast<VkInstance>( m_instance ), + reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkDebugReportCallbackEXT *>( pCallback ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDebugReportCallbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDebugReportCallbackEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DebugReportCallbackEXT>::type Instance::createDebugReportCallbackEXT( + const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDebugReportCallbackEXT && "Function <vkCreateDebugReportCallbackEXT> requires <VK_EXT_debug_report>" ); +# endif + + DebugReportCallbackEXT callback; + Result result = static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, + reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" ); + + return detail::createResultValueType( result, std::move( callback ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDebugReportCallbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDebugReportCallbackEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DebugReportCallbackEXT, Dispatch>>::type + Instance::createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDebugReportCallbackEXT && "Function <vkCreateDebugReportCallbackEXT> requires <VK_EXT_debug_report>" ); +# endif + + DebugReportCallbackEXT callback; + Result result = static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, + reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Instance::createDebugReportCallbackEXTUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<DebugReportCallbackEXT, Dispatch>( callback, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDebugReportCallbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugReportCallbackEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, + const AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDebugReportCallbackEXT( + static_cast<VkInstance>( m_instance ), static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDebugReportCallbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugReportCallbackEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDebugReportCallbackEXT && "Function <vkDestroyDebugReportCallbackEXT> requires <VK_EXT_debug_report>" ); +# endif + + d.vkDestroyDebugReportCallbackEXT( + m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDebugReportCallbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugReportCallbackEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Instance::destroy( DebugReportCallbackEXT callback, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDebugReportCallbackEXT( + static_cast<VkInstance>( m_instance ), static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDebugReportCallbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugReportCallbackEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Instance::destroy( DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDebugReportCallbackEXT && "Function <vkDestroyDebugReportCallbackEXT> requires 
<VK_EXT_debug_report>" ); +# endif + + d.vkDestroyDebugReportCallbackEXT( + m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDebugReportMessageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugReportMessageEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( DebugReportFlagsEXT flags, + DebugReportObjectTypeEXT objectType_, + uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDebugReportMessageEXT( static_cast<VkInstance>( m_instance ), + static_cast<VkDebugReportFlagsEXT>( flags ), + static_cast<VkDebugReportObjectTypeEXT>( objectType_ ), + object, + location, + messageCode, + pLayerPrefix, + pMessage ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDebugReportMessageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugReportMessageEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( DebugReportFlagsEXT flags, + DebugReportObjectTypeEXT objectType_, + uint64_t object, + size_t location, + int32_t messageCode, + const std::string & layerPrefix, + const std::string & message, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDebugReportMessageEXT && "Function <vkDebugReportMessageEXT> requires <VK_EXT_debug_report>" ); +# endif + + d.vkDebugReportMessageEXT( m_instance, + static_cast<VkDebugReportFlagsEXT>( flags ), + static_cast<VkDebugReportObjectTypeEXT>( objectType_ ), + object, + location, + messageCode, + layerPrefix.c_str(), + message.c_str() ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_debug_marker === + + // wrapper function for command vkDebugMarkerSetObjectTagEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugMarkerSetObjectTagEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT * pTagInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkDebugMarkerSetObjectTagEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDebugMarkerSetObjectTagEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugMarkerSetObjectTagEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDebugMarkerSetObjectTagEXT && "Function <vkDebugMarkerSetObjectTagEXT> requires <VK_EXT_debug_marker>" ); +# endif + + Result result = static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDebugMarkerSetObjectNameEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugMarkerSetObjectNameEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT * pNameInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkDebugMarkerSetObjectNameEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDebugMarkerSetObjectNameEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDebugMarkerSetObjectNameEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDebugMarkerSetObjectNameEXT && "Function <vkDebugMarkerSetObjectNameEXT> requires <VK_EXT_debug_marker>" ); +# endif + + Result result = static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDebugMarkerBeginEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDebugMarkerBeginEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDebugMarkerBeginEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDebugMarkerBeginEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDebugMarkerBeginEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDebugMarkerBeginEXT && "Function <vkCmdDebugMarkerBeginEXT> requires <VK_EXT_debug_marker>" ); +# endif + + d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDebugMarkerEndEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDebugMarkerEndEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDebugMarkerEndEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) ); + } + + // wrapper function for command vkCmdDebugMarkerInsertEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDebugMarkerInsertEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDebugMarkerInsertEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDebugMarkerInsertEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDebugMarkerInsertEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDebugMarkerInsertEXT && "Function <vkCmdDebugMarkerInsertEXT> requires <VK_EXT_debug_marker>" ); +# endif + + d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_video_queue === + + // wrapper function for command vkGetPhysicalDeviceVideoCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoCapabilitiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoCapabilitiesKHR( const VideoProfileInfoKHR * pVideoProfile, + VideoCapabilitiesKHR * pCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkVideoProfileInfoKHR *>( pVideoProfile ), + reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceVideoCapabilitiesKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoCapabilitiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VideoCapabilitiesKHR>::type + PhysicalDevice::getVideoCapabilitiesKHR( const VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoCapabilitiesKHR && "Function <vkGetPhysicalDeviceVideoCapabilitiesKHR> requires <VK_KHR_video_queue>" ); +# endif + + VideoCapabilitiesKHR capabilities; + Result result = static_cast<Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( + m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + + return detail::createResultValueType( result, std::move( capabilities ) ); + } + + // wrapper function for command vkGetPhysicalDeviceVideoCapabilitiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoCapabilitiesKHR.html + template <typename X, typename Y, typename... Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type + PhysicalDevice::getVideoCapabilitiesKHR( const VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoCapabilitiesKHR && "Function <vkGetPhysicalDeviceVideoCapabilitiesKHR> requires <VK_KHR_video_queue>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + VideoCapabilitiesKHR & capabilities = structureChain.template get<VideoCapabilitiesKHR>(); + Result result = static_cast<Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( + m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + + return detail::createResultValueType( result, std::move( structureChain ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceVideoFormatPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoFormatPropertiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, + uint32_t * pVideoFormatPropertyCount, + VideoFormatPropertiesKHR * pVideoFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( pVideoFormatInfo ), + pVideoFormatPropertyCount, + 
reinterpret_cast<VkVideoFormatPropertiesKHR *>( pVideoFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceVideoFormatPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoFormatPropertiesKHR.html + template <typename VideoFormatPropertiesKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename VideoFormatPropertiesKHRAllocator::value_type, VideoFormatPropertiesKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type + PhysicalDevice::getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR && + "Function <vkGetPhysicalDeviceVideoFormatPropertiesKHR> requires <VK_KHR_video_queue>" ); +# endif + + std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties; + uint32_t videoFormatPropertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && videoFormatPropertyCount ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + result = static_cast<Result>( + d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + } + return detail::createResultValueType( result, std::move( videoFormatProperties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceVideoFormatPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoFormatPropertiesKHR.html + template <typename VideoFormatPropertiesKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename VideoFormatPropertiesKHRAllocator::value_type, VideoFormatPropertiesKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type + PhysicalDevice::getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR && + "Function 
<vkGetPhysicalDeviceVideoFormatPropertiesKHR> requires <VK_KHR_video_queue>" ); +# endif + + std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties( videoFormatPropertiesKHRAllocator ); + uint32_t videoFormatPropertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && videoFormatPropertyCount ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + result = static_cast<Result>( + d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + } + return detail::createResultValueType( result, std::move( videoFormatProperties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceVideoFormatPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoFormatPropertiesKHR.html + template <typename StructureChain, + typename StructureChainAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type + PhysicalDevice::getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR && + "Function <vkGetPhysicalDeviceVideoFormatPropertiesKHR> requires <VK_KHR_video_queue>" ); +# endif + + std::vector<StructureChain, StructureChainAllocator> structureChains; + std::vector<VideoFormatPropertiesKHR> videoFormatProperties; + uint32_t videoFormatPropertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && videoFormatPropertyCount ) + { + structureChains.resize( videoFormatPropertyCount ); + videoFormatProperties.resize( videoFormatPropertyCount ); + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + videoFormatProperties[i].pNext = structureChains[i].template get<VideoFormatPropertiesKHR>().pNext; + } + result = static_cast<Result>( + d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) ); + } + } while ( result == 
Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + structureChains.resize( videoFormatPropertyCount ); + } + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + structureChains[i].template get<VideoFormatPropertiesKHR>() = videoFormatProperties[i]; + } + return detail::createResultValueType( result, std::move( structureChains ) ); + } + + // wrapper function for command vkGetPhysicalDeviceVideoFormatPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoFormatPropertiesKHR.html + template <typename StructureChain, + typename StructureChainAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type + PhysicalDevice::getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + StructureChainAllocator & structureChainAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR && + "Function <vkGetPhysicalDeviceVideoFormatPropertiesKHR> requires <VK_KHR_video_queue>" ); +# endif + + std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator ); + std::vector<VideoFormatPropertiesKHR> videoFormatProperties; + uint32_t videoFormatPropertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && videoFormatPropertyCount ) + { + structureChains.resize( videoFormatPropertyCount ); + videoFormatProperties.resize( videoFormatPropertyCount ); + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + videoFormatProperties[i].pNext = structureChains[i].template get<VideoFormatPropertiesKHR>().pNext; + } + result = static_cast<Result>( + d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + structureChains.resize( videoFormatPropertyCount ); + } + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + structureChains[i].template get<VideoFormatPropertiesKHR>() = videoFormatProperties[i]; + } + return detail::createResultValueType( result, std::move( structureChains ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateVideoSessionKHR, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateVideoSessionKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionKHR( const VideoSessionCreateInfoKHR * pCreateInfo, + const AllocationCallbacks * pAllocator, + VideoSessionKHR * pVideoSession, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateVideoSessionKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkVideoSessionKHR *>( pVideoSession ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateVideoSessionKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VideoSessionKHR>::type + Device::createVideoSessionKHR( const VideoSessionCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateVideoSessionKHR && "Function <vkCreateVideoSessionKHR> requires <VK_KHR_video_queue>" ); +# endif + + VideoSessionKHR videoSession; + Result result = static_cast<Result>( d.vkCreateVideoSessionKHR( m_device, + reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" ); + + return detail::createResultValueType( result, std::move( videoSession ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateVideoSessionKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VideoSessionKHR, Dispatch>>::type + Device::createVideoSessionKHRUnique( const VideoSessionCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateVideoSessionKHR && "Function <vkCreateVideoSessionKHR> requires <VK_KHR_video_queue>" ); +# endif + + VideoSessionKHR videoSession; + Result result = static_cast<Result>( d.vkCreateVideoSessionKHR( m_device, + reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<VideoSessionKHR, Dispatch>( videoSession, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* 
VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyVideoSessionKHR( VideoSessionKHR videoSession, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyVideoSessionKHR( + static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyVideoSessionKHR( VideoSessionKHR videoSession, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionKHR && "Function <vkDestroyVideoSessionKHR> requires <VK_KHR_video_queue>" ); +# endif + + d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( VideoSessionKHR videoSession, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyVideoSessionKHR( + static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyVideoSessionKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( VideoSessionKHR videoSession, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionKHR && "Function <vkDestroyVideoSessionKHR> requires <VK_KHR_video_queue>" ); +# endif + + d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetVideoSessionMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetVideoSessionMemoryRequirementsKHR.html + template <typename 
Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getVideoSessionMemoryRequirementsKHR( VideoSessionKHR videoSession, + uint32_t * pMemoryRequirementsCount, + VideoSessionMemoryRequirementsKHR * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR( static_cast<VkDevice>( m_device ), + static_cast<VkVideoSessionKHR>( videoSession ), + pMemoryRequirementsCount, + reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( pMemoryRequirements ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetVideoSessionMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetVideoSessionMemoryRequirementsKHR.html + template < + typename VideoSessionMemoryRequirementsKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename VideoSessionMemoryRequirementsKHRAllocator::value_type, VideoSessionMemoryRequirementsKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::vector<VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type + Device::getVideoSessionMemoryRequirementsKHR( VideoSessionKHR videoSession, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetVideoSessionMemoryRequirementsKHR && "Function <vkGetVideoSessionMemoryRequirementsKHR> requires <VK_KHR_video_queue>" ); +# endif + + std::vector<VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements; + uint32_t memoryRequirementsCount; + Result result; + do + { + result = static_cast<Result>( + d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && memoryRequirementsCount ) + { + memoryRequirements.resize( memoryRequirementsCount ); + result = static_cast<Result>( + d.vkGetVideoSessionMemoryRequirementsKHR( m_device, + static_cast<VkVideoSessionKHR>( videoSession ), + &memoryRequirementsCount, + reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" ); + VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() ); + if ( memoryRequirementsCount < memoryRequirements.size() ) + { + memoryRequirements.resize( memoryRequirementsCount ); + } + return detail::createResultValueType( result, std::move( memoryRequirements ) ); + } + + // wrapper function for command vkGetVideoSessionMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetVideoSessionMemoryRequirementsKHR.html + template < + typename VideoSessionMemoryRequirementsKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename VideoSessionMemoryRequirementsKHRAllocator::value_type, VideoSessionMemoryRequirementsKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE + typename ResultValueType<std::vector<VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type + Device::getVideoSessionMemoryRequirementsKHR( VideoSessionKHR videoSession, + VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetVideoSessionMemoryRequirementsKHR && "Function <vkGetVideoSessionMemoryRequirementsKHR> requires <VK_KHR_video_queue>" ); +# endif + + std::vector<VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements( videoSessionMemoryRequirementsKHRAllocator ); + uint32_t memoryRequirementsCount; + Result result; + do + { + result = static_cast<Result>( + d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && memoryRequirementsCount ) + { + memoryRequirements.resize( memoryRequirementsCount ); + result = static_cast<Result>( + d.vkGetVideoSessionMemoryRequirementsKHR( m_device, + static_cast<VkVideoSessionKHR>( videoSession ), + &memoryRequirementsCount, + reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" ); + VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() ); + if ( memoryRequirementsCount < memoryRequirements.size() ) + { + memoryRequirements.resize( memoryRequirementsCount ); + } + return detail::createResultValueType( result, std::move( memoryRequirements ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBindVideoSessionMemoryKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindVideoSessionMemoryKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindVideoSessionMemoryKHR( VideoSessionKHR videoSession, + uint32_t bindSessionMemoryInfoCount, + const BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkBindVideoSessionMemoryKHR( static_cast<VkDevice>( m_device ), + static_cast<VkVideoSessionKHR>( videoSession ), + bindSessionMemoryInfoCount, + reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( pBindSessionMemoryInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindVideoSessionMemoryKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindVideoSessionMemoryKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindVideoSessionMemoryKHR( + VideoSessionKHR videoSession, ArrayProxy<const BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindVideoSessionMemoryKHR && "Function 
<vkBindVideoSessionMemoryKHR> requires <VK_KHR_video_queue>" ); +# endif + + Result result = + static_cast<Result>( d.vkBindVideoSessionMemoryKHR( m_device, + static_cast<VkVideoSessionKHR>( videoSession ), + bindSessionMemoryInfos.size(), + reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( bindSessionMemoryInfos.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateVideoSessionParametersKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionParametersKHR( const VideoSessionParametersCreateInfoKHR * pCreateInfo, + const AllocationCallbacks * pAllocator, + VideoSessionParametersKHR * pVideoSessionParameters, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateVideoSessionParametersKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkVideoSessionParametersKHR *>( pVideoSessionParameters ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateVideoSessionParametersKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VideoSessionParametersKHR>::type Device::createVideoSessionParametersKHR( + const VideoSessionParametersCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateVideoSessionParametersKHR && "Function <vkCreateVideoSessionParametersKHR> requires <VK_KHR_video_queue>" ); +# endif + + VideoSessionParametersKHR videoSessionParameters; + Result result = static_cast<Result>( d.vkCreateVideoSessionParametersKHR( m_device, + reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" ); + + return detail::createResultValueType( result, std::move( videoSessionParameters ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateVideoSessionParametersKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VideoSessionParametersKHR, Dispatch>>::type + Device::createVideoSessionParametersKHRUnique( const VideoSessionParametersCreateInfoKHR & createInfo, + Optional<const 
AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateVideoSessionParametersKHR && "Function <vkCreateVideoSessionParametersKHR> requires <VK_KHR_video_queue>" ); +# endif + + VideoSessionParametersKHR videoSessionParameters; + Result result = static_cast<Result>( d.vkCreateVideoSessionParametersKHR( m_device, + reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<VideoSessionParametersKHR, Dispatch>( videoSessionParameters, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUpdateVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateVideoSessionParametersKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::updateVideoSessionParametersKHR( VideoSessionParametersKHR videoSessionParameters, + const VideoSessionParametersUpdateInfoKHR * pUpdateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR( static_cast<VkDevice>( m_device ), + static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), + reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( pUpdateInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUpdateVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateVideoSessionParametersKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::updateVideoSessionParametersKHR( + VideoSessionParametersKHR videoSessionParameters, const VideoSessionParametersUpdateInfoKHR & updateInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUpdateVideoSessionParametersKHR && "Function <vkUpdateVideoSessionParametersKHR> requires <VK_KHR_video_queue>" ); +# endif + + Result result = + static_cast<Result>( d.vkUpdateVideoSessionParametersKHR( m_device, + static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), + reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionParametersKHR.html + template <typename Dispatch, typename 
std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VideoSessionParametersKHR videoSessionParameters, + const AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyVideoSessionParametersKHR( static_cast<VkDevice>( m_device ), + static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionParametersKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VideoSessionParametersKHR videoSessionParameters, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionParametersKHR && "Function <vkDestroyVideoSessionParametersKHR> requires <VK_KHR_video_queue>" ); +# endif + + d.vkDestroyVideoSessionParametersKHR( + m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionParametersKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( VideoSessionParametersKHR videoSessionParameters, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyVideoSessionParametersKHR( static_cast<VkDevice>( m_device ), + static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyVideoSessionParametersKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( VideoSessionParametersKHR videoSessionParameters, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionParametersKHR && "Function <vkDestroyVideoSessionParametersKHR> requires <VK_KHR_video_queue>" ); +# endif + + d.vkDestroyVideoSessionParametersKHR( + m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBeginVideoCodingKHR, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginVideoCodingKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VideoBeginCodingInfoKHR * pBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginVideoCodingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginVideoCodingKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VideoBeginCodingInfoKHR & beginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginVideoCodingKHR && "Function <vkCmdBeginVideoCodingKHR> requires <VK_KHR_video_queue>" ); +# endif + + d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndVideoCodingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndVideoCodingKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VideoEndCodingInfoKHR * pEndCodingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoEndCodingInfoKHR *>( pEndCodingInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdEndVideoCodingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndVideoCodingKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VideoEndCodingInfoKHR & endCodingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdEndVideoCodingKHR && "Function <vkCmdEndVideoCodingKHR> requires <VK_KHR_video_queue>" ); +# endif + + d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdControlVideoCodingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdControlVideoCodingKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VideoCodingControlInfoKHR * pCodingControlInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdControlVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + 
reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdControlVideoCodingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdControlVideoCodingKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VideoCodingControlInfoKHR & codingControlInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdControlVideoCodingKHR && "Function <vkCmdControlVideoCodingKHR> requires <VK_KHR_video_queue>" ); +# endif + + d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_video_decode_queue === + + // wrapper function for command vkCmdDecodeVideoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecodeVideoKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VideoDecodeInfoKHR * pDecodeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDecodeVideoKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoDecodeInfoKHR *>( pDecodeInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDecodeVideoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecodeVideoKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VideoDecodeInfoKHR & decodeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDecodeVideoKHR && "Function <vkCmdDecodeVideoKHR> requires <VK_KHR_video_decode_queue>" ); +# endif + + d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( &decodeInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_transform_feedback === + + // wrapper function for command vkCmdBindTransformFeedbackBuffersEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindTransformFeedbackBuffersEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + uint32_t bindingCount, + const Buffer * pBuffers, + const DeviceSize * pOffsets, + const DeviceSize * pSizes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindTransformFeedbackBuffersEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + firstBinding, + bindingCount, + reinterpret_cast<const VkBuffer *>( pBuffers ), + reinterpret_cast<const VkDeviceSize *>( pOffsets ), + reinterpret_cast<const VkDeviceSize *>( pSizes ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command 
vkCmdBindTransformFeedbackBuffersEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindTransformFeedbackBuffersEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + ArrayProxy<const Buffer> const & buffers, + ArrayProxy<const DeviceSize> const & offsets, + ArrayProxy<const DeviceSize> const & sizes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindTransformFeedbackBuffersEXT && "Function <vkCmdBindTransformFeedbackBuffersEXT> requires <VK_EXT_transform_feedback>" ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, + firstBinding, + buffers.size(), + reinterpret_cast<const VkBuffer *>( buffers.data() ), + reinterpret_cast<const VkDeviceSize *>( offsets.data() ), + reinterpret_cast<const VkDeviceSize *>( sizes.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBeginTransformFeedbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginTransformFeedbackEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const Buffer * pCounterBuffers, + const DeviceSize * pCounterBufferOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginTransformFeedbackEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + firstCounterBuffer, + counterBufferCount, + reinterpret_cast<const VkBuffer *>( pCounterBuffers ), + reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginTransformFeedbackEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginTransformFeedbackEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + ArrayProxy<const Buffer> const & counterBuffers, + ArrayProxy<const DeviceSize> const & counterBufferOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginTransformFeedbackEXT && "Function <vkCmdBeginTransformFeedbackEXT> requires <VK_EXT_transform_feedback>" ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( 
counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); +# else + if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, + firstCounterBuffer, + counterBuffers.size(), + reinterpret_cast<const VkBuffer *>( counterBuffers.data() ), + reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndTransformFeedbackEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndTransformFeedbackEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const Buffer * pCounterBuffers, + const DeviceSize * pCounterBufferOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndTransformFeedbackEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + firstCounterBuffer, + counterBufferCount, + reinterpret_cast<const VkBuffer *>( pCounterBuffers ), + reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdEndTransformFeedbackEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndTransformFeedbackEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, + ArrayProxy<const Buffer> const & counterBuffers, + ArrayProxy<const DeviceSize> const & counterBufferOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdEndTransformFeedbackEXT && "Function <vkCmdEndTransformFeedbackEXT> requires <VK_EXT_transform_feedback>" ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); +# else + if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, + firstCounterBuffer, + counterBuffers.size(), + reinterpret_cast<const VkBuffer *>( counterBuffers.data() ), + reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBeginQueryIndexedEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginQueryIndexedEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( + QueryPool queryPool, uint32_t query, QueryControlFlags flags, uint32_t index, Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginQueryIndexedEXT( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index ); + } + + // wrapper function for command vkCmdEndQueryIndexedEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndQueryIndexedEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndQueryIndexedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, index ); + } + + // wrapper function for command vkCmdDrawIndirectByteCountEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectByteCountEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, + uint32_t firstInstance, + Buffer counterBuffer, + DeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndirectByteCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + instanceCount, + firstInstance, + static_cast<VkBuffer>( counterBuffer ), + static_cast<VkDeviceSize>( counterBufferOffset ), + counterOffset, + vertexStride ); + } + + //=== VK_NVX_binary_import === + + // wrapper function for command vkCreateCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCuModuleNVX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuModuleNVX( const CuModuleCreateInfoNVX * pCreateInfo, + const AllocationCallbacks * pAllocator, + CuModuleNVX * pModule, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateCuModuleNVX( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkCuModuleCreateInfoNVX *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkCuModuleNVX *>( pModule ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCuModuleNVX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<CuModuleNVX>::type + Device::createCuModuleNVX( const CuModuleCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCuModuleNVX && "Function <vkCreateCuModuleNVX> requires <VK_NVX_binary_import>" ); +# endif + + CuModuleNVX module; + Result result = static_cast<Result>( d.vkCreateCuModuleNVX( m_device, + reinterpret_cast<const 
VkCuModuleCreateInfoNVX *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkCuModuleNVX *>( &module ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" ); + + return detail::createResultValueType( result, std::move( module ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCuModuleNVX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<CuModuleNVX, Dispatch>>::type + Device::createCuModuleNVXUnique( const CuModuleCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCuModuleNVX && "Function <vkCreateCuModuleNVX> requires <VK_NVX_binary_import>" ); +# endif + + CuModuleNVX module; + Result result = static_cast<Result>( d.vkCreateCuModuleNVX( m_device, + reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkCuModuleNVX *>( &module ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<CuModuleNVX, Dispatch>( module, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCuFunctionNVX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuFunctionNVX( const CuFunctionCreateInfoNVX * pCreateInfo, + const AllocationCallbacks * pAllocator, + CuFunctionNVX * pFunction, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateCuFunctionNVX( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkCuFunctionNVX *>( pFunction ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCuFunctionNVX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<CuFunctionNVX>::type + Device::createCuFunctionNVX( const CuFunctionCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCuFunctionNVX && "Function <vkCreateCuFunctionNVX> requires <VK_NVX_binary_import>" ); +# endif + + CuFunctionNVX function; + Result result = static_cast<Result>( d.vkCreateCuFunctionNVX( m_device, + reinterpret_cast<const 
VkCuFunctionCreateInfoNVX *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkCuFunctionNVX *>( &function ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" ); + + return detail::createResultValueType( result, std::move( function ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCuFunctionNVX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<CuFunctionNVX, Dispatch>>::type + Device::createCuFunctionNVXUnique( const CuFunctionCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCuFunctionNVX && "Function <vkCreateCuFunctionNVX> requires <VK_NVX_binary_import>" ); +# endif + + CuFunctionNVX function; + Result result = static_cast<Result>( d.vkCreateCuFunctionNVX( m_device, + reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkCuFunctionNVX *>( &function ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<CuFunctionNVX, Dispatch>( function, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuModuleNVX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( CuModuleNVX module, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCuModuleNVX( + static_cast<VkDevice>( m_device ), static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuModuleNVX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyCuModuleNVX( CuModuleNVX module, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCuModuleNVX && "Function <vkDestroyCuModuleNVX> requires <VK_NVX_binary_import>" ); +# endif + + d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuModuleNVX.html + template <typename 
Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( CuModuleNVX module, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCuModuleNVX( + static_cast<VkDevice>( m_device ), static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCuModuleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuModuleNVX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( CuModuleNVX module, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCuModuleNVX && "Function <vkDestroyCuModuleNVX> requires <VK_NVX_binary_import>" ); +# endif + + d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuFunctionNVX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyCuFunctionNVX( CuFunctionNVX function, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCuFunctionNVX( + static_cast<VkDevice>( m_device ), static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuFunctionNVX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyCuFunctionNVX( CuFunctionNVX function, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCuFunctionNVX && "Function <vkDestroyCuFunctionNVX> requires <VK_NVX_binary_import>" ); +# endif + + d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuFunctionNVX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( CuFunctionNVX function, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCuFunctionNVX( + static_cast<VkDevice>( m_device ), 
static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCuFunctionNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCuFunctionNVX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( CuFunctionNVX function, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCuFunctionNVX && "Function <vkDestroyCuFunctionNVX> requires <VK_NVX_binary_import>" ); +# endif + + d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCuLaunchKernelNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCuLaunchKernelNVX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const CuLaunchInfoNVX * pLaunchInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCuLaunchKernelNVX( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCuLaunchKernelNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCuLaunchKernelNVX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const CuLaunchInfoNVX & launchInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCuLaunchKernelNVX && "Function <vkCmdCuLaunchKernelNVX> requires <VK_NVX_binary_import>" ); +# endif + + d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NVX_image_view_handle === + + // wrapper function for command vkGetImageViewHandleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewHandleNVX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const ImageViewHandleInfoNVX * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetImageViewHandleNVX( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageViewHandleNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewHandleNVX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const ImageViewHandleInfoNVX 
& info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageViewHandleNVX && "Function <vkGetImageViewHandleNVX> requires <VK_NVX_image_view_handle>" ); +# endif + + uint32_t result = d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) ); + + return result; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageViewHandle64NVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewHandle64NVX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE uint64_t Device::getImageViewHandle64NVX( const ImageViewHandleInfoNVX * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetImageViewHandle64NVX( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageViewHandle64NVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewHandle64NVX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE uint64_t Device::getImageViewHandle64NVX( const ImageViewHandleInfoNVX & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageViewHandle64NVX && "Function <vkGetImageViewHandle64NVX> requires <VK_NVX_image_view_handle>" ); +# endif + + uint64_t result = d.vkGetImageViewHandle64NVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) ); + + return result; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageViewAddressNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewAddressNVX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewAddressNVX( ImageView imageView, + ImageViewAddressPropertiesNVX * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetImageViewAddressNVX( + static_cast<VkDevice>( m_device ), static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageViewAddressNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewAddressNVX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<ImageViewAddressPropertiesNVX>::type + Device::getImageViewAddressNVX( ImageView imageView, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageViewAddressNVX && "Function <vkGetImageViewAddressNVX> requires <VK_NVX_image_view_handle>" ); +# endif + + 
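+    // Clarifying note (not part of the generated output): the enhanced-mode body below fills a local
+    // ImageViewAddressPropertiesNVX and passes the VkResult to detail::resultCheck, which throws a
+    // Vulkan exception on failure unless VULKAN_HPP_NO_EXCEPTIONS is defined (in which case it asserts).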
ImageViewAddressPropertiesNVX properties; + Result result = static_cast<Result>( + d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" ); + + return detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_AMD_draw_indirect_count === + + // wrapper function for command vkCmdDrawIndirectCountAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectCountAMD.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( Buffer buffer, + DeviceSize offset, + Buffer countBuffer, + DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndirectCountAMD( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkBuffer>( buffer ), + static_cast<VkDeviceSize>( offset ), + static_cast<VkBuffer>( countBuffer ), + static_cast<VkDeviceSize>( countBufferOffset ), + maxDrawCount, + stride ); + } + + // wrapper function for command vkCmdDrawIndexedIndirectCountAMD, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexedIndirectCountAMD.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( Buffer buffer, + DeviceSize offset, + Buffer countBuffer, + DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndexedIndirectCountAMD( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkBuffer>( buffer ), + static_cast<VkDeviceSize>( offset ), + static_cast<VkBuffer>( countBuffer ), + static_cast<VkDeviceSize>( countBufferOffset ), + maxDrawCount, + stride ); + } + + //=== VK_AMD_shader_info === + + // wrapper function for command vkGetShaderInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderInfoAMD.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( Pipeline pipeline, + ShaderStageFlagBits shaderStage, + ShaderInfoTypeAMD infoType, + size_t * pInfoSize, + void * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetShaderInfoAMD( static_cast<VkDevice>( m_device ), + static_cast<VkPipeline>( pipeline ), + static_cast<VkShaderStageFlagBits>( shaderStage ), + static_cast<VkShaderInfoTypeAMD>( infoType ), + pInfoSize, + pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetShaderInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderInfoAMD.html + template <typename Uint8_tAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type + Device::getShaderInfoAMD( Pipeline pipeline, ShaderStageFlagBits shaderStage, ShaderInfoTypeAMD infoType, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetShaderInfoAMD && "Function <vkGetShaderInfoAMD> requires <VK_AMD_shader_info>" ); +# endif + + std::vector<uint8_t, Uint8_tAllocator> info; + size_t infoSize; + Result result; + do + { + result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device, + static_cast<VkPipeline>( pipeline ), + static_cast<VkShaderStageFlagBits>( shaderStage ), + static_cast<VkShaderInfoTypeAMD>( infoType ), + &infoSize, + nullptr ) ); + if ( ( result == Result::eSuccess ) && infoSize ) + { + info.resize( infoSize ); + result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device, + static_cast<VkPipeline>( pipeline ), + static_cast<VkShaderStageFlagBits>( shaderStage ), + static_cast<VkShaderInfoTypeAMD>( infoType ), + &infoSize, + reinterpret_cast<void *>( info.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); + VULKAN_HPP_ASSERT( infoSize <= info.size() ); + if ( infoSize < info.size() ) + { + info.resize( infoSize ); + } + return detail::createResultValueType( result, std::move( info ) ); + } + + // wrapper function for command vkGetShaderInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderInfoAMD.html + template <typename Uint8_tAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type Device::getShaderInfoAMD( + Pipeline pipeline, ShaderStageFlagBits shaderStage, ShaderInfoTypeAMD infoType, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetShaderInfoAMD && "Function <vkGetShaderInfoAMD> requires <VK_AMD_shader_info>" ); +# endif + + std::vector<uint8_t, Uint8_tAllocator> info( uint8_tAllocator ); + size_t infoSize; + Result result; + do + { + result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device, + static_cast<VkPipeline>( pipeline ), + static_cast<VkShaderStageFlagBits>( shaderStage ), + static_cast<VkShaderInfoTypeAMD>( infoType ), + &infoSize, + nullptr ) ); + if ( ( result == Result::eSuccess ) && infoSize ) + { + info.resize( infoSize ); + result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device, + static_cast<VkPipeline>( pipeline ), + static_cast<VkShaderStageFlagBits>( shaderStage ), + static_cast<VkShaderInfoTypeAMD>( infoType ), + &infoSize, + reinterpret_cast<void *>( info.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); + VULKAN_HPP_ASSERT( infoSize <= info.size() ); + if ( infoSize < info.size() ) + { + info.resize( infoSize ); + } + return detail::createResultValueType( result, std::move( info ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_dynamic_rendering === + + // wrapper function for command 
vkCmdBeginRenderingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderingKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const RenderingInfo * pRenderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginRenderingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginRenderingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderingKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const RenderingInfo & renderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginRenderingKHR && "Function <vkCmdBeginRenderingKHR> requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" ); +# endif + + d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndRenderingKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderingKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndRenderingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ) ); + } + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + // wrapper function for command vkCreateStreamDescriptorSurfaceGGP, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateStreamDescriptorSurfaceGGP.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, + const AllocationCallbacks * pAllocator, + SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( static_cast<VkInstance>( m_instance ), + reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateStreamDescriptorSurfaceGGP, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateStreamDescriptorSurfaceGGP.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<SurfaceKHR>::type Instance::createStreamDescriptorSurfaceGGP( + const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const 
AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateStreamDescriptorSurfaceGGP && "Function <vkCreateStreamDescriptorSurfaceGGP> requires <VK_GGP_stream_descriptor_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, + reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" ); + + return detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateStreamDescriptorSurfaceGGP, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateStreamDescriptorSurfaceGGP.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR, Dispatch>>::type Instance::createStreamDescriptorSurfaceGGPUnique( + const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateStreamDescriptorSurfaceGGP && "Function <vkCreateStreamDescriptorSurfaceGGP> requires <VK_GGP_stream_descriptor_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, + reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + + // wrapper function for command vkGetPhysicalDeviceExternalImageFormatPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalImageFormatPropertiesNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getExternalImageFormatPropertiesNV( Format format, + ImageType type, + ImageTiling tiling, + ImageUsageFlags usage, + ImageCreateFlags flags, + ExternalMemoryHandleTypeFlagsNV externalHandleType, + ExternalImageFormatPropertiesNV * pExternalImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), + static_cast<VkFormat>( format ), + static_cast<VkImageType>( type ), + static_cast<VkImageTiling>( tiling ), + 
static_cast<VkImageUsageFlags>( usage ), + static_cast<VkImageCreateFlags>( flags ), + static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), + reinterpret_cast<VkExternalImageFormatPropertiesNV *>( pExternalImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceExternalImageFormatPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalImageFormatPropertiesNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<ExternalImageFormatPropertiesNV>::type + PhysicalDevice::getExternalImageFormatPropertiesNV( Format format, + ImageType type, + ImageTiling tiling, + ImageUsageFlags usage, + ImageCreateFlags flags, + ExternalMemoryHandleTypeFlagsNV externalHandleType, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV && + "Function <vkGetPhysicalDeviceExternalImageFormatPropertiesNV> requires <VK_NV_external_memory_capabilities>" ); +# endif + + ExternalImageFormatPropertiesNV externalImageFormatProperties; + Result result = static_cast<Result>( + d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, + static_cast<VkFormat>( format ), + static_cast<VkImageType>( type ), + static_cast<VkImageTiling>( tiling ), + static_cast<VkImageUsageFlags>( usage ), + static_cast<VkImageCreateFlags>( flags ), + static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), + reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" ); + + return detail::createResultValueType( result, std::move( externalImageFormatProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + + // wrapper function for command vkGetMemoryWin32HandleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandleNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( DeviceMemory memory, + ExternalMemoryHandleTypeFlagsNV handleType, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetMemoryWin32HandleNV( + static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryWin32HandleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandleNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type + Device::getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryWin32HandleNV && "Function <vkGetMemoryWin32HandleNV> requires <VK_NV_external_memory_win32>" ); +# endif + + HANDLE handle; + Result result = static_cast<Result>( + d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" ); + + return detail::createResultValueType( result, std::move( handle ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_get_physical_device_properties2 === + + // wrapper function for command vkGetPhysicalDeviceFeatures2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceFeatures2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceFeatures2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2KHR && + "Function <vkGetPhysicalDeviceFeatures2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + PhysicalDeviceFeatures2 features; + d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) ); + + return features; + } + + // wrapper function for command vkGetPhysicalDeviceFeatures2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFeatures2KHR.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2KHR && + "Function <vkGetPhysicalDeviceFeatures2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + PhysicalDeviceFeatures2 & features = structureChain.template get<PhysicalDeviceFeatures2>(); + d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( PhysicalDeviceProperties2 * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2KHR && + "Function <vkGetPhysicalDeviceProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + PhysicalDeviceProperties2 properties; + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); + + return properties; + } + + // wrapper function for command vkGetPhysicalDeviceProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceProperties2KHR.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2KHR && + "Function <vkGetPhysicalDeviceProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + PhysicalDeviceProperties2 & properties = structureChain.template get<PhysicalDeviceProperties2>(); + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + PhysicalDevice::getFormatProperties2KHR( Format format, FormatProperties2 * pFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceFormatProperties2KHR( + static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE FormatProperties2 PhysicalDevice::getFormatProperties2KHR( Format format, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2KHR && + "Function <vkGetPhysicalDeviceFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + FormatProperties2 formatProperties; + d.vkGetPhysicalDeviceFormatProperties2KHR( + m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); + + return formatProperties; + } + + // wrapper function for command vkGetPhysicalDeviceFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFormatProperties2KHR.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2KHR( Format format, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2KHR && + "Function <vkGetPhysicalDeviceFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + FormatProperties2 & formatProperties = structureChain.template get<FormatProperties2>(); + d.vkGetPhysicalDeviceFormatProperties2KHR( + m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ), + reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<ImageFormatProperties2>::type + PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2KHR && + "Function <vkGetPhysicalDeviceImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + ImageFormatProperties2 imageFormatProperties; + Result result = + static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), + reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + + return detail::createResultValueType( result, std::move( imageFormatProperties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceImageFormatProperties2KHR.html + template 
<typename X, typename Y, typename... Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type + PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2KHR && + "Function <vkGetPhysicalDeviceImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + ImageFormatProperties2 & imageFormatProperties = structureChain.template get<ImageFormatProperties2>(); + Result result = + static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), + reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + + return detail::createResultValueType( result, std::move( structureChain ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount, + QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2KHR.html + template <typename QueueFamilyProperties2Allocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename QueueFamilyProperties2Allocator::value_type, QueueFamilyProperties2>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> + PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && + "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + 
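+    // Clarifying note: this is the second step of the standard Vulkan enumeration pattern. The first
+    // call above (with a nullptr data pointer) queried the element count and the vector was resized to
+    // it; the call below fills the actual QueueFamilyProperties2 entries. A minimal usage sketch,
+    // assuming the default dispatcher is in use:
+    //   std::vector<vk::QueueFamilyProperties2> props = physicalDevice.getQueueFamilyProperties2KHR();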
d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2KHR.html + template <typename QueueFamilyProperties2Allocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename QueueFamilyProperties2Allocator::value_type, QueueFamilyProperties2>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> + PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && + "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator ); + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2KHR.html + template <typename StructureChain, + typename StructureChainAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> + PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && + "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + std::vector<StructureChain, StructureChainAllocator> structureChains; + std::vector<QueueFamilyProperties2> queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + 
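+    // Clarifying note: both the caller-visible structure chains and a temporary QueueFamilyProperties2
+    // array are resized to the queried count, and each temporary element's pNext is wired to the
+    // corresponding chain so that extension structures are filled by the second call; the results are
+    // then copied back into the chains.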
structureChains.resize( queueFamilyPropertyCount ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = structureChains[i].template get<QueueFamilyProperties2>().pNext; + } + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + structureChains.resize( queueFamilyPropertyCount ); + } + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + structureChains[i].template get<QueueFamilyProperties2>() = queueFamilyProperties[i]; + } + return structureChains; + } + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyProperties2KHR.html + template <typename StructureChain, + typename StructureChainAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> + PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && + "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator ); + std::vector<QueueFamilyProperties2> queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + structureChains.resize( queueFamilyPropertyCount ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = structureChains[i].template get<QueueFamilyProperties2>().pNext; + } + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + structureChains.resize( queueFamilyPropertyCount ); + } + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + structureChains[i].template get<QueueFamilyProperties2>() = queueFamilyProperties[i]; + } + return structureChains; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d ) 
const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceMemoryProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2KHR && + "Function <vkGetPhysicalDeviceMemoryProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + PhysicalDeviceMemoryProperties2 memoryProperties; + d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) ); + + return memoryProperties; + } + + // wrapper function for command vkGetPhysicalDeviceMemoryProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMemoryProperties2KHR.html + template <typename X, typename Y, typename... Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2KHR && + "Function <vkGetPhysicalDeviceMemoryProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + PhysicalDeviceMemoryProperties2 & memoryProperties = structureChain.template get<PhysicalDeviceMemoryProperties2>(); + d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + SparseImageFormatProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ), + pPropertyCount, + reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command 
vkGetPhysicalDeviceSparseImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2KHR.html + template <typename SparseImageFormatProperties2Allocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename SparseImageFormatProperties2Allocator::value_type, SparseImageFormatProperties2>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> + PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR && + "Function <vkGetPhysicalDeviceSparseImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), + &propertyCount, + reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) ); + + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + // wrapper function for command vkGetPhysicalDeviceSparseImageFormatProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2KHR.html + template <typename SparseImageFormatProperties2Allocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename SparseImageFormatProperties2Allocator::value_type, SparseImageFormatProperties2>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> + PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR && + "Function <vkGetPhysicalDeviceSparseImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" ); +# endif + + std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator ); + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, + 
reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), + &propertyCount, + reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) ); + + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_device_group === + + // wrapper function for command vkGetDeviceGroupPeerMemoryFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPeerMemoryFeaturesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceGroupPeerMemoryFeaturesKHR( + static_cast<VkDevice>( m_device ), heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceGroupPeerMemoryFeaturesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPeerMemoryFeaturesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceGroupPeerMemoryFeaturesKHR && + "Function <vkGetDeviceGroupPeerMemoryFeaturesKHR> requires <VK_KHR_device_group> or <VK_VERSION_1_1>" ); +# endif + + PeerMemoryFeatureFlags peerMemoryFeatures; + d.vkGetDeviceGroupPeerMemoryFeaturesKHR( + m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) ); + + return peerMemoryFeatures; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetDeviceMaskKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDeviceMaskKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDeviceMaskKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), deviceMask ); + } + + // wrapper function for command vkCmdDispatchBaseKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchBaseKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
d.vkCmdDispatchBaseKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + // wrapper function for command vkCreateViSurfaceNN, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateViSurfaceNN.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN * pCreateInfo, + const AllocationCallbacks * pAllocator, + SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateViSurfaceNN( static_cast<VkInstance>( m_instance ), + reinterpret_cast<const VkViSurfaceCreateInfoNN *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateViSurfaceNN, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateViSurfaceNN.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<SurfaceKHR>::type + Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateViSurfaceNN && "Function <vkCreateViSurfaceNN> requires <VK_NN_vi_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, + reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" ); + + return detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateViSurfaceNN, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateViSurfaceNN.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR, Dispatch>>::type + Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateViSurfaceNN && "Function <vkCreateViSurfaceNN> requires <VK_NN_vi_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, + reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique" ); + + return detail::createResultValueType( result, + 
UniqueHandle<SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_maintenance1 === + + // wrapper function for command vkTrimCommandPoolKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTrimCommandPoolKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( CommandPool commandPool, CommandPoolTrimFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkTrimCommandPoolKHR( static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) ); + } + + //=== VK_KHR_device_group_creation === + + // wrapper function for command vkEnumeratePhysicalDeviceGroupsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceGroupsKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t * pPhysicalDeviceGroupCount, + PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( static_cast<VkInstance>( m_instance ), + pPhysicalDeviceGroupCount, + reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumeratePhysicalDeviceGroupsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceGroupsKHR.html + template <typename PhysicalDeviceGroupPropertiesAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PhysicalDeviceGroupPropertiesAllocator::value_type, PhysicalDeviceGroupProperties>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type + Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroupsKHR && + "Function <vkEnumeratePhysicalDeviceGroupsKHR> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" ); +# endif + + std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( + m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == 
Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); + } + + // wrapper function for command vkEnumeratePhysicalDeviceGroupsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceGroupsKHR.html + template <typename PhysicalDeviceGroupPropertiesAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PhysicalDeviceGroupPropertiesAllocator::value_type, PhysicalDeviceGroupProperties>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type + Instance::enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroupsKHR && + "Function <vkEnumeratePhysicalDeviceGroupsKHR> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" ); +# endif + + std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties( physicalDeviceGroupPropertiesAllocator ); + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( + m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_external_memory_capabilities === + + // wrapper function for command vkGetPhysicalDeviceExternalBufferPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalBufferPropertiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + ExternalBufferProperties * pExternalBufferProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( 
static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), + reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceExternalBufferPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalBufferPropertiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ExternalBufferProperties + PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalBufferPropertiesKHR && + "Function <vkGetPhysicalDeviceExternalBufferPropertiesKHR> requires <VK_KHR_external_memory_capabilities> or <VK_VERSION_1_1>" ); +# endif + + ExternalBufferProperties externalBufferProperties; + d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), + reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) ); + + return externalBufferProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + + // wrapper function for command vkGetMemoryWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandleKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetMemoryWin32HandleKHR( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandleKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type + Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryWin32HandleKHR && "Function <vkGetMemoryWin32HandleKHR> requires <VK_KHR_external_memory_win32>" ); +# endif + + HANDLE handle; + Result result = + static_cast<Result>( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); + + return detail::createResultValueType( result, std::move( handle ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper 
function for command vkGetMemoryWin32HandlePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandlePropertiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( static_cast<VkDevice>( m_device ), + static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), + handle, + reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryWin32HandlePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryWin32HandlePropertiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<MemoryWin32HandlePropertiesKHR>::type + Device::getMemoryWin32HandlePropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryWin32HandlePropertiesKHR && "Function <vkGetMemoryWin32HandlePropertiesKHR> requires <VK_KHR_external_memory_win32>" ); +# endif + + MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties; + Result result = + static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, + static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), + handle, + reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); + + return detail::createResultValueType( result, std::move( memoryWin32HandleProperties ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + + // wrapper function for command vkGetMemoryFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryFdKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const MemoryGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetMemoryFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryFdKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryFdKHR && "Function <vkGetMemoryFdKHR> requires <VK_KHR_external_memory_fd>" ); +# endif + + int fd; + Result result = static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" ); + + return detail::createResultValueType( result, std::move( fd ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetMemoryFdPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryFdPropertiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, + int fd, + MemoryFdPropertiesKHR * pMemoryFdProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( static_cast<VkDevice>( m_device ), + static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), + fd, + reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryFdPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryFdPropertiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<MemoryFdPropertiesKHR>::type + Device::getMemoryFdPropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryFdPropertiesKHR && "Function <vkGetMemoryFdPropertiesKHR> requires <VK_KHR_external_memory_fd>" ); +# endif + + MemoryFdPropertiesKHR memoryFdProperties; + Result result = static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( + m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); + + return detail::createResultValueType( result, std::move( memoryFdProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_external_semaphore_capabilities === + + // wrapper function for command vkGetPhysicalDeviceExternalSemaphorePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalSemaphorePropertiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo 
*>( pExternalSemaphoreInfo ), + reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceExternalSemaphorePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalSemaphorePropertiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR( + const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR && + "Function <vkGetPhysicalDeviceExternalSemaphorePropertiesKHR> requires <VK_KHR_external_semaphore_capabilities> or <VK_VERSION_1_1>" ); +# endif + + ExternalSemaphoreProperties externalSemaphoreProperties; + d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), + reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) ); + + return externalSemaphoreProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + + // wrapper function for command vkImportSemaphoreWin32HandleKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreWin32HandleKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( + const ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkImportSemaphoreWin32HandleKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreWin32HandleKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkImportSemaphoreWin32HandleKHR && "Function <vkImportSemaphoreWin32HandleKHR> requires <VK_KHR_external_semaphore_win32>" ); +# endif + + Result result = static_cast<Result>( + d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); + + return detail::createResultValueType( result ); + } +# endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetSemaphoreWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreWin32HandleKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSemaphoreWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreWin32HandleKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type + Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSemaphoreWin32HandleKHR && "Function <vkGetSemaphoreWin32HandleKHR> requires <VK_KHR_external_semaphore_win32>" ); +# endif + + HANDLE handle; + Result result = static_cast<Result>( + d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); + + return detail::createResultValueType( result, std::move( handle ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + + // wrapper function for command vkImportSemaphoreFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreFdKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkImportSemaphoreFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkImportSemaphoreFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreFdKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkImportSemaphoreFdKHR && "Function <vkImportSemaphoreFdKHR> requires <VK_KHR_external_semaphore_fd>" ); 
+# endif + + Result result = static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetSemaphoreFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreFdKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkGetSemaphoreFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSemaphoreFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreFdKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSemaphoreFdKHR && "Function <vkGetSemaphoreFdKHR> requires <VK_KHR_external_semaphore_fd>" ); +# endif + + int fd; + Result result = static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" ); + + return detail::createResultValueType( result, std::move( fd ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_push_descriptor === + + // wrapper function for command vkCmdPushDescriptorSetKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( PipelineBindPoint pipelineBindPoint, + PipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const WriteDescriptorSet * pDescriptorWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSetKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkPipelineBindPoint>( pipelineBindPoint ), + static_cast<VkPipelineLayout>( layout ), + set, + descriptorWriteCount, + reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDescriptorSetKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( PipelineBindPoint pipelineBindPoint, + PipelineLayout layout, + uint32_t set, + ArrayProxy<const 
WriteDescriptorSet> const & descriptorWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetKHR && "Function <vkCmdPushDescriptorSetKHR> requires <VK_KHR_push_descriptor> or <VK_VERSION_1_4>" ); +# endif + + d.vkCmdPushDescriptorSetKHR( m_commandBuffer, + static_cast<VkPipelineBindPoint>( pipelineBindPoint ), + static_cast<VkPipelineLayout>( layout ), + set, + descriptorWrites.size(), + reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushDescriptorSetWithTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplateKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( + DescriptorUpdateTemplate descriptorUpdateTemplate, PipelineLayout layout, uint32_t set, const void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSetWithTemplateKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), + static_cast<VkPipelineLayout>( layout ), + set, + pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDescriptorSetWithTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplateKHR.html + template <typename DataType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, + PipelineLayout layout, + uint32_t set, + DataType const & data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkCmdPushDescriptorSetWithTemplateKHR && + "Function <vkCmdPushDescriptorSetWithTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_KHR_push_descriptor> or <VK_VERSION_1_4>" ); +# endif + + d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, + static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), + static_cast<VkPipelineLayout>( layout ), + set, + reinterpret_cast<const void *>( &data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_conditional_rendering === + + // wrapper function for command vkCmdBeginConditionalRenderingEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginConditionalRenderingEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginConditionalRenderingEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) ); + } + +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginConditionalRenderingEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginConditionalRenderingEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginConditionalRenderingEXT && "Function <vkCmdBeginConditionalRenderingEXT> requires <VK_EXT_conditional_rendering>" ); +# endif + + d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndConditionalRenderingEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndConditionalRenderingEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndConditionalRenderingEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) ); + } + + //=== VK_KHR_descriptor_update_template === + + // wrapper function for command vkCreateDescriptorUpdateTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplateKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + DescriptorUpdateTemplate * pDescriptorUpdateTemplate, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDescriptorUpdateTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplateKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplateKHR( + const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplateKHR && + "Function <vkCreateDescriptorUpdateTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); +# endif + + DescriptorUpdateTemplate 
descriptorUpdateTemplate; + Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, + reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" ); + + return detail::createResultValueType( result, std::move( descriptorUpdateTemplate ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDescriptorUpdateTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplateKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate, Dispatch>>::type + Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplateKHR && + "Function <vkCreateDescriptorUpdateTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); +# endif + + DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, + reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<DescriptorUpdateTemplate, Dispatch>( descriptorUpdateTemplate, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDescriptorUpdateTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorUpdateTemplateKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, + const AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorUpdateTemplateKHR( static_cast<VkDevice>( m_device ), + static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDescriptorUpdateTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorUpdateTemplateKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional<const 
AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDescriptorUpdateTemplateKHR && + "Function <vkDestroyDescriptorUpdateTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); +# endif + + d.vkDestroyDescriptorUpdateTemplateKHR( + m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUpdateDescriptorSetWithTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSetWithTemplateKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( DescriptorSet descriptorSet, + DescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUpdateDescriptorSetWithTemplateKHR( static_cast<VkDevice>( m_device ), + static_cast<VkDescriptorSet>( descriptorSet ), + static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), + pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUpdateDescriptorSetWithTemplateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSetWithTemplateKHR.html + template <typename DataType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( DescriptorSet descriptorSet, + DescriptorUpdateTemplate descriptorUpdateTemplate, + DataType const & data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUpdateDescriptorSetWithTemplateKHR && + "Function <vkUpdateDescriptorSetWithTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" ); +# endif + + d.vkUpdateDescriptorSetWithTemplateKHR( m_device, + static_cast<VkDescriptorSet>( descriptorSet ), + static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), + reinterpret_cast<const void *>( &data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_clip_space_w_scaling === + + // wrapper function for command vkCmdSetViewportWScalingNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWScalingNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, + uint32_t viewportCount, + const ViewportWScalingNV * pViewportWScalings, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportWScalingNV( + static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetViewportWScalingNV, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWScalingNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, + ArrayProxy<const ViewportWScalingNV> const & viewportWScalings, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetViewportWScalingNV && "Function <vkCmdSetViewportWScalingNV> requires <VK_NV_clip_space_w_scaling>" ); +# endif + + d.vkCmdSetViewportWScalingNV( + m_commandBuffer, firstViewport, viewportWScalings.size(), reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_direct_mode_display === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkReleaseDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseDisplayEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkReleaseDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( display ) ) ); + } +#else + // wrapper function for command vkReleaseDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseDisplayEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type PhysicalDevice::releaseDisplayEXT( DisplayKHR display, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkReleaseDisplayEXT && "Function <vkReleaseDisplayEXT> requires <VK_EXT_direct_mode_display>" ); +# endif + + Result result = static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::releaseDisplayEXT" ); + + return detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + + // wrapper function for command vkAcquireXlibDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireXlibDisplayEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display * dpy, + DisplayKHR display, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkAcquireXlibDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), dpy, static_cast<VkDisplayKHR>( display ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquireXlibDisplayEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireXlibDisplayEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquireXlibDisplayEXT && "Function <vkAcquireXlibDisplayEXT> requires <VK_EXT_acquire_xlib_display>" ); +# endif + + Result result = static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); + + return detail::createResultValueType( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetRandROutputDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRandROutputDisplayEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getRandROutputDisplayEXT( Display * dpy, RROutput rrOutput, DisplayKHR * pDisplay, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkGetRandROutputDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRandROutputDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRandROutputDisplayEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DisplayKHR>::type + PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRandROutputDisplayEXT && "Function <vkGetRandROutputDisplayEXT> requires <VK_EXT_acquire_xlib_display>" ); +# endif + + DisplayKHR display; + Result result = static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" ); + + return detail::createResultValueType( result, std::move( display ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkGetRandROutputDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRandROutputDisplayEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DisplayKHR, Dispatch>>::type + PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRandROutputDisplayEXT && 
"Function <vkGetRandROutputDisplayEXT> requires <VK_EXT_acquire_xlib_display>" ); +# endif + + DisplayKHR display; + Result result = static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique" ); + + return detail::createResultValueType( result, UniqueHandle<DisplayKHR, Dispatch>( display, detail::ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilities2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilities2EXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( SurfaceKHR surface, + SurfaceCapabilities2EXT * pSurfaceCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), + static_cast<VkSurfaceKHR>( surface ), + reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilities2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilities2EXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<SurfaceCapabilities2EXT>::type + PhysicalDevice::getSurfaceCapabilities2EXT( SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT && + "Function <vkGetPhysicalDeviceSurfaceCapabilities2EXT> requires <VK_EXT_display_surface_counter>" ); +# endif + + SurfaceCapabilities2EXT surfaceCapabilities; + Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( + m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); + + return detail::createResultValueType( result, std::move( surfaceCapabilities ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_display_control === + + // wrapper function for command vkDisplayPowerControlEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDisplayPowerControlEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( DisplayKHR display, + const DisplayPowerInfoEXT * pDisplayPowerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkDisplayPowerControlEXT( + 
static_cast<VkDevice>( m_device ), static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDisplayPowerControlEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDisplayPowerControlEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDisplayPowerControlEXT && "Function <vkDisplayPowerControlEXT> requires <VK_EXT_display_control>" ); +# endif + + Result result = static_cast<Result>( + d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkRegisterDeviceEventEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterDeviceEventEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerEventEXT( const DeviceEventInfoEXT * pDeviceEventInfo, + const AllocationCallbacks * pAllocator, + Fence * pFence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkRegisterDeviceEventEXT( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkFence *>( pFence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkRegisterDeviceEventEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterDeviceEventEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<Fence>::type + Device::registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkRegisterDeviceEventEXT && "Function <vkRegisterDeviceEventEXT> requires <VK_EXT_display_control>" ); +# endif + + Fence fence; + Result result = static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, + reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkFence *>( &fence ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" ); + + return detail::createResultValueType( result, std::move( fence ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkRegisterDeviceEventEXT, see 
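+  // NOTE: illustrative usage sketch for the VK_EXT_display_control wrappers above (not generated from the
+  // registry); device, display and the chosen power state / event type are assumptions for the example.
+  //   device.displayPowerControlEXT( display, DisplayPowerInfoEXT( DisplayPowerStateEXT::eOff ) );
+  //   // register a fence that is signaled on a device-level event such as a display hotplug:
+  //   Fence fence = device.registerEventEXT( DeviceEventInfoEXT( DeviceEventTypeEXT::eDisplayHotplug ) );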
https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterDeviceEventEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Fence, Dispatch>>::type + Device::registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkRegisterDeviceEventEXT && "Function <vkRegisterDeviceEventEXT> requires <VK_EXT_display_control>" ); +# endif + + Fence fence; + Result result = static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, + reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkFence *>( &fence ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" ); + + return detail::createResultValueType( result, UniqueHandle<Fence, Dispatch>( fence, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkRegisterDisplayEventEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterDisplayEventEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( DisplayKHR display, + const DisplayEventInfoEXT * pDisplayEventInfo, + const AllocationCallbacks * pAllocator, + Fence * pFence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkRegisterDisplayEventEXT( static_cast<VkDevice>( m_device ), + static_cast<VkDisplayKHR>( display ), + reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkFence *>( pFence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkRegisterDisplayEventEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterDisplayEventEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<Fence>::type Device::registerDisplayEventEXT( DisplayKHR display, + const DisplayEventInfoEXT & displayEventInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkRegisterDisplayEventEXT && "Function <vkRegisterDisplayEventEXT> requires <VK_EXT_display_control>" ); +# endif + + Fence fence; + Result result = static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, + static_cast<VkDisplayKHR>( display ), + reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkFence *>( &fence ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" ); + + return detail::createResultValueType( result, 
std::move( fence ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkRegisterDisplayEventEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkRegisterDisplayEventEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Fence, Dispatch>>::type Device::registerDisplayEventEXTUnique( + DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkRegisterDisplayEventEXT && "Function <vkRegisterDisplayEventEXT> requires <VK_EXT_display_control>" ); +# endif + + Fence fence; + Result result = static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, + static_cast<VkDisplayKHR>( display ), + reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkFence *>( &fence ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" ); + + return detail::createResultValueType( result, UniqueHandle<Fence, Dispatch>( fence, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetSwapchainCounterEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainCounterEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( SwapchainKHR swapchain, + SurfaceCounterFlagBitsEXT counter, + uint64_t * pCounterValue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetSwapchainCounterEXT( + static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSwapchainCounterEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainCounterEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE +# if defined( VULKAN_HPP_HANDLE_ERROR_OUT_OF_DATE_AS_SUCCESS ) + ResultValue<uint64_t> +# else + typename ResultValueType<uint64_t>::type +# endif + Device::getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSwapchainCounterEXT && "Function <vkGetSwapchainCounterEXT> requires <VK_EXT_display_control>" ); +# endif + + uint64_t counterValue; + Result result = static_cast<Result>( + d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue ) ); + +# if defined( VULKAN_HPP_HANDLE_ERROR_OUT_OF_DATE_AS_SUCCESS ) + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::getSwapchainCounterEXT", { Result::eSuccess, Result::eErrorOutOfDateKHR } ); +# else + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" ); +# endif + +# if defined( VULKAN_HPP_HANDLE_ERROR_OUT_OF_DATE_AS_SUCCESS ) + return ResultValue<uint64_t>( result, std::move( counterValue ) ); +# else + return detail::createResultValueType( result, std::move( counterValue ) ); +# endif + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_GOOGLE_display_timing === + + // wrapper function for command vkGetRefreshCycleDurationGOOGLE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRefreshCycleDurationGOOGLE.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRefreshCycleDurationGOOGLE( SwapchainKHR swapchain, + RefreshCycleDurationGOOGLE * pDisplayTimingProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( static_cast<VkDevice>( m_device ), + static_cast<VkSwapchainKHR>( swapchain ), + reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRefreshCycleDurationGOOGLE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRefreshCycleDurationGOOGLE.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<RefreshCycleDurationGOOGLE>::type + Device::getRefreshCycleDurationGOOGLE( SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRefreshCycleDurationGOOGLE && "Function <vkGetRefreshCycleDurationGOOGLE> requires <VK_GOOGLE_display_timing>" ); +# endif + + RefreshCycleDurationGOOGLE displayTimingProperties; + Result result = static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( + m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" ); + + return detail::createResultValueType( result, std::move( displayTimingProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPastPresentationTimingGOOGLE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPastPresentationTimingGOOGLE.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, + uint32_t * pPresentationTimingCount, + PastPresentationTimingGOOGLE * pPresentationTimings, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( static_cast<VkDevice>( m_device ), + static_cast<VkSwapchainKHR>( swapchain ), + pPresentationTimingCount, + reinterpret_cast<VkPastPresentationTimingGOOGLE *>( pPresentationTimings ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // 
wrapper function for command vkGetPastPresentationTimingGOOGLE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPastPresentationTimingGOOGLE.html + template <typename PastPresentationTimingGOOGLEAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PastPresentationTimingGOOGLEAllocator::value_type, PastPresentationTimingGOOGLE>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE +# if defined( VULKAN_HPP_HANDLE_ERROR_OUT_OF_DATE_AS_SUCCESS ) + ResultValue<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>> +# else + typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type +# endif + Device::getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPastPresentationTimingGOOGLE && "Function <vkGetPastPresentationTimingGOOGLE> requires <VK_GOOGLE_display_timing>" ); +# endif + + std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings; + uint32_t presentationTimingCount; + Result result; + do + { + result = + static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && presentationTimingCount ) + { + presentationTimings.resize( presentationTimingCount ); + result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, + static_cast<VkSwapchainKHR>( swapchain ), + &presentationTimingCount, + reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + +# if defined( VULKAN_HPP_HANDLE_ERROR_OUT_OF_DATE_AS_SUCCESS ) + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE", { Result::eSuccess, Result::eIncomplete, Result::eErrorOutOfDateKHR } ); +# else + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); +# endif + + VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); + if ( presentationTimingCount < presentationTimings.size() ) + { + presentationTimings.resize( presentationTimingCount ); + } + +# if defined( VULKAN_HPP_HANDLE_ERROR_OUT_OF_DATE_AS_SUCCESS ) + return ResultValue<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>( result, std::move( presentationTimings ) ); +# else + return detail::createResultValueType( result, std::move( presentationTimings ) ); +# endif + } + + // wrapper function for command vkGetPastPresentationTimingGOOGLE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPastPresentationTimingGOOGLE.html + template <typename PastPresentationTimingGOOGLEAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PastPresentationTimingGOOGLEAllocator::value_type, PastPresentationTimingGOOGLE>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE +# if defined( VULKAN_HPP_HANDLE_ERROR_OUT_OF_DATE_AS_SUCCESS ) + ResultValue<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>> +# else + typename 
ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type +# endif + Device::getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, + PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPastPresentationTimingGOOGLE && "Function <vkGetPastPresentationTimingGOOGLE> requires <VK_GOOGLE_display_timing>" ); +# endif + + std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings( pastPresentationTimingGOOGLEAllocator ); + uint32_t presentationTimingCount; + Result result; + do + { + result = + static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && presentationTimingCount ) + { + presentationTimings.resize( presentationTimingCount ); + result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, + static_cast<VkSwapchainKHR>( swapchain ), + &presentationTimingCount, + reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + +# if defined( VULKAN_HPP_HANDLE_ERROR_OUT_OF_DATE_AS_SUCCESS ) + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE", { Result::eSuccess, Result::eIncomplete, Result::eErrorOutOfDateKHR } ); +# else + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); +# endif + + VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); + if ( presentationTimingCount < presentationTimings.size() ) + { + presentationTimings.resize( presentationTimingCount ); + } + +# if defined( VULKAN_HPP_HANDLE_ERROR_OUT_OF_DATE_AS_SUCCESS ) + return ResultValue<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>( result, std::move( presentationTimings ) ); +# else + return detail::createResultValueType( result, std::move( presentationTimings ) ); +# endif + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_discard_rectangles === + + // wrapper function for command vkCmdSetDiscardRectangleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDiscardRectangleEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const Rect2D * pDiscardRectangles, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDiscardRectangleEXT( + static_cast<VkCommandBuffer>( m_commandBuffer ), firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetDiscardRectangleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDiscardRectangleEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + ArrayProxy<const Rect2D> const & discardRectangles, + 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetDiscardRectangleEXT && "Function <vkCmdSetDiscardRectangleEXT> requires <VK_EXT_discard_rectangles>" ); +# endif + + d.vkCmdSetDiscardRectangleEXT( + m_commandBuffer, firstDiscardRectangle, discardRectangles.size(), reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetDiscardRectangleEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDiscardRectangleEnableEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEnableEXT( Bool32 discardRectangleEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDiscardRectangleEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( discardRectangleEnable ) ); + } + + // wrapper function for command vkCmdSetDiscardRectangleModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDiscardRectangleModeEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleModeEXT( DiscardRectangleModeEXT discardRectangleMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDiscardRectangleModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDiscardRectangleModeEXT>( discardRectangleMode ) ); + } + + //=== VK_EXT_hdr_metadata === + + // wrapper function for command vkSetHdrMetadataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetHdrMetadataEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, + const SwapchainKHR * pSwapchains, + const HdrMetadataEXT * pMetadata, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkSetHdrMetadataEXT( static_cast<VkDevice>( m_device ), + swapchainCount, + reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ), + reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetHdrMetadataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetHdrMetadataEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy<const SwapchainKHR> const & swapchains, + ArrayProxy<const HdrMetadataEXT> const & metadata, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetHdrMetadataEXT && "Function <vkSetHdrMetadataEXT> requires <VK_EXT_hdr_metadata>" ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() ); +# else + if ( swapchains.size() != metadata.size() ) + { + throw 
LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkSetHdrMetadataEXT( m_device, + swapchains.size(), + reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ), + reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_create_renderpass2 === + + // wrapper function for command vkCreateRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const RenderPassCreateInfo2 * pCreateInfo, + const AllocationCallbacks * pAllocator, + RenderPass * pRenderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateRenderPass2KHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkRenderPass *>( pRenderPass ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<RenderPass>::type + Device::createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRenderPass2KHR && "Function <vkCreateRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); +# endif + + RenderPass renderPass; + Result result = static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, + reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" ); + + return detail::createResultValueType( result, std::move( renderPass ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<RenderPass, Dispatch>>::type + Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRenderPass2KHR && "Function <vkCreateRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); +# endif + + RenderPass renderPass; + Result result = static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, + reinterpret_cast<const 
VkRenderPassCreateInfo2 *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkRenderPass *>( &renderPass ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<RenderPass, Dispatch>( renderPass, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBeginRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo * pRenderPassBegin, + const SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginRenderPass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), + reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, + const SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginRenderPass2KHR && "Function <vkCmdBeginRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); +# endif + + d.vkCmdBeginRenderPass2KHR( + m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdNextSubpass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdNextSubpass2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfo * pSubpassBeginInfo, + const SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdNextSubpass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), + reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdNextSubpass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdNextSubpass2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, + const SubpassEndInfo & subpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdNextSubpass2KHR && "Function <vkCmdNextSubpass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); +# endif + + d.vkCmdNextSubpass2KHR( + m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderPass2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfo * pSubpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndRenderPass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdEndRenderPass2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderPass2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdEndRenderPass2KHR && "Function <vkCmdEndRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" ); +# endif + + d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_shared_presentable_image === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSwapchainStatusKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainStatusKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetSwapchainStatusKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ) ) ); + } +#else + // wrapper function for command vkGetSwapchainStatusKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainStatusKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSwapchainStatusKHR && "Function <vkGetSwapchainStatusKHR> requires <VK_KHR_shared_presentable_image>" ); +# endif + + Result result = static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); + +# if defined( 
VULKAN_HPP_HANDLE_ERROR_OUT_OF_DATE_AS_SUCCESS ) + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR", { Result::eSuccess, Result::eSuboptimalKHR, Result::eErrorOutOfDateKHR } ); +# else + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR", { Result::eSuccess, Result::eSuboptimalKHR } ); +# endif + + return static_cast<Result>( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_external_fence_capabilities === + + // wrapper function for command vkGetPhysicalDeviceExternalFencePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalFencePropertiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + ExternalFenceProperties * pExternalFenceProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceExternalFencePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), + reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceExternalFencePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalFencePropertiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ExternalFenceProperties + PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalFencePropertiesKHR && + "Function <vkGetPhysicalDeviceExternalFencePropertiesKHR> requires <VK_KHR_external_fence_capabilities> or <VK_VERSION_1_1>" ); +# endif + + ExternalFenceProperties externalFenceProperties; + d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), + reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) ); + + return externalFenceProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + + // wrapper function for command vkImportFenceWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportFenceWin32HandleKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkImportFenceWin32HandleKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) ); + } + +# ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkImportFenceWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportFenceWin32HandleKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkImportFenceWin32HandleKHR && "Function <vkImportFenceWin32HandleKHR> requires <VK_KHR_external_fence_win32>" ); +# endif + + Result result = static_cast<Result>( + d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); + + return detail::createResultValueType( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetFenceWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceWin32HandleKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkGetFenceWin32HandleKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetFenceWin32HandleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceWin32HandleKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type + Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetFenceWin32HandleKHR && "Function <vkGetFenceWin32HandleKHR> requires <VK_KHR_external_fence_win32>" ); +# endif + + HANDLE handle; + Result result = + static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" ); + + return detail::createResultValueType( result, std::move( handle ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + + // wrapper function for command vkImportFenceFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportFenceFdKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const ImportFenceFdInfoKHR * pImportFenceFdInfo, + Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkImportFenceFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkImportFenceFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportFenceFdKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkImportFenceFdKHR && "Function <vkImportFenceFdKHR> requires <VK_KHR_external_fence_fd>" ); +# endif + + Result result = static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetFenceFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceFdKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const FenceGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetFenceFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetFenceFdKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFenceFdKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetFenceFdKHR && "Function <vkGetFenceFdKHR> requires <VK_KHR_external_fence_fd>" ); +# endif + + int fd; + Result result = static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" ); + + return detail::createResultValueType( result, std::move( fd ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_performance_query === + + // wrapper function for command vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + 
PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, + uint32_t * pCounterCount, + PerformanceCounterKHR * pCounters, + PerformanceCounterDescriptionKHR * pCounterDescriptions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + queueFamilyIndex, + pCounterCount, + reinterpret_cast<VkPerformanceCounterKHR *>( pCounters ), + reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( pCounterDescriptions ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR.html + template <typename PerformanceCounterKHRAllocator, + typename PerformanceCounterDescriptionKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PerformanceCounterKHRAllocator::value_type, PerformanceCounterKHR>::value && + std::is_same<typename PerformanceCounterDescriptionKHRAllocator::value_type, PerformanceCounterDescriptionKHR>::value, + int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, + std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR && + "Function <vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR> requires <VK_KHR_performance_query>" ); +# endif + + std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, + std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>> + data_; + std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data_.first; + std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data_.second; + uint32_t counterCount; + Result result; + do + { + result = static_cast<Result>( + d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); + if ( ( result == Result::eSuccess ) && counterCount ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + &counterCount, + reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), + reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + VULKAN_HPP_ASSERT( counterCount <= counters.size() ); + if ( counterCount < counters.size() ) + { + counters.resize( counterCount ); + 
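+        // second call of the enumerate pattern: both vectors are sized to counterCount so the implementation can fill
+        // them; if more counters became available in between, the call returns eIncomplete and the loop retries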
counterDescriptions.resize( counterCount ); + } + return detail::createResultValueType( result, std::move( data_ ) ); + } + + // wrapper function for command vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR.html + template <typename PerformanceCounterKHRAllocator, + typename PerformanceCounterDescriptionKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PerformanceCounterKHRAllocator::value_type, PerformanceCounterKHR>::value && + std::is_same<typename PerformanceCounterDescriptionKHRAllocator::value_type, PerformanceCounterDescriptionKHR>::value, + int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, + std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, + PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, + PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR && + "Function <vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR> requires <VK_KHR_performance_query>" ); +# endif + + std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, + std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>> + data_( + std::piecewise_construct, std::forward_as_tuple( performanceCounterKHRAllocator ), std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) ); + std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data_.first; + std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data_.second; + uint32_t counterCount; + Result result; + do + { + result = static_cast<Result>( + d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); + if ( ( result == Result::eSuccess ) && counterCount ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + &counterCount, + reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), + reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + VULKAN_HPP_ASSERT( counterCount <= counters.size() ); + if ( counterCount < counters.size() ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + } + return detail::createResultValueType( result, std::move( data_ ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR, see + // 
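+  // NOTE: illustrative usage sketch for the performance-counter enumeration above (not generated from the
+  // registry); physicalDevice and queueFamilyIndex are assumptions for the example, exceptions assumed enabled.
+  //   auto [counters, descriptions] = physicalDevice.enumerateQueueFamilyPerformanceQueryCountersKHR( queueFamilyIndex );
+  //   // counters[i].unit / scope / storage describe counter i; descriptions[i].name and .description are human readable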
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, + uint32_t * pNumPasses, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ), + pNumPasses ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( + const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR && + "Function <vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR> requires <VK_KHR_performance_query>" ); +# endif + + uint32_t numPasses; + d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ), &numPasses ); + + return numPasses; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkAcquireProfilingLockKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireProfilingLockKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkAcquireProfilingLockKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquireProfilingLockKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireProfilingLockKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquireProfilingLockKHR && "Function <vkAcquireProfilingLockKHR> requires <VK_KHR_performance_query>" ); +# endif + + Result result = static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR 
*>( &info ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkReleaseProfilingLockKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseProfilingLockKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkReleaseProfilingLockKHR( static_cast<VkDevice>( m_device ) ); + } + + //=== VK_KHR_get_surface_capabilities2 === + + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilities2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilities2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + SurfaceCapabilities2KHR * pSurfaceCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), + reinterpret_cast<VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilities2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilities2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<SurfaceCapabilities2KHR>::type + PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR && + "Function <vkGetPhysicalDeviceSurfaceCapabilities2KHR> requires <VK_KHR_get_surface_capabilities2>" ); +# endif + + SurfaceCapabilities2KHR surfaceCapabilities; + Result result = + static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), + reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); + + return detail::createResultValueType( result, std::move( surfaceCapabilities ) ); + } + + // wrapper function for command vkGetPhysicalDeviceSurfaceCapabilities2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceCapabilities2KHR.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type + PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR && + "Function <vkGetPhysicalDeviceSurfaceCapabilities2KHR> requires <VK_KHR_get_surface_capabilities2>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get<SurfaceCapabilities2KHR>(); + Result result = + static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), + reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); + + return detail::createResultValueType( result, std::move( structureChain ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceSurfaceFormats2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormats2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pSurfaceFormatCount, + SurfaceFormat2KHR * pSurfaceFormats, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), + pSurfaceFormatCount, + reinterpret_cast<VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfaceFormats2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormats2KHR.html + template <typename SurfaceFormat2KHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename SurfaceFormat2KHRAllocator::value_type, SurfaceFormat2KHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type + PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR && + "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" ); +# endif + + std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats; + uint32_t surfaceFormatCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( 
&surfaceInfo ), &surfaceFormatCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + return detail::createResultValueType( result, std::move( surfaceFormats ) ); + } + + // wrapper function for command vkGetPhysicalDeviceSurfaceFormats2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormats2KHR.html + template <typename SurfaceFormat2KHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename SurfaceFormat2KHRAllocator::value_type, SurfaceFormat2KHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type + PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR && + "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" ); +# endif + + std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats( surfaceFormat2KHRAllocator ); + uint32_t surfaceFormatCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + return detail::createResultValueType( result, std::move( surfaceFormats ) ); + } + + // wrapper function for command vkGetPhysicalDeviceSurfaceFormats2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormats2KHR.html + template <typename StructureChain, + typename StructureChainAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type + PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR && + "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" ); +# endif + + std::vector<StructureChain, StructureChainAllocator> structureChains; + std::vector<SurfaceFormat2KHR> surfaceFormats; + uint32_t surfaceFormatCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && surfaceFormatCount ) + { + structureChains.resize( surfaceFormatCount ); + surfaceFormats.resize( surfaceFormatCount ); + for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) + { + surfaceFormats[i].pNext = structureChains[i].template get<SurfaceFormat2KHR>().pNext; + } + result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + structureChains.resize( surfaceFormatCount ); + } + for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) + { + structureChains[i].template get<SurfaceFormat2KHR>() = surfaceFormats[i]; + } + return detail::createResultValueType( result, std::move( structureChains ) ); + } + + // wrapper function for command vkGetPhysicalDeviceSurfaceFormats2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfaceFormats2KHR.html + template <typename StructureChain, + typename StructureChainAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type + PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + StructureChainAllocator & structureChainAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR && + "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" ); +# endif + + std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator ); + std::vector<SurfaceFormat2KHR> surfaceFormats; + uint32_t surfaceFormatCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); + if ( ( result == 
Result::eSuccess ) && surfaceFormatCount ) + { + structureChains.resize( surfaceFormatCount ); + surfaceFormats.resize( surfaceFormatCount ); + for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) + { + surfaceFormats[i].pNext = structureChains[i].template get<SurfaceFormat2KHR>().pNext; + } + result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + structureChains.resize( surfaceFormatCount ); + } + for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) + { + structureChains[i].template get<SurfaceFormat2KHR>() = surfaceFormats[i]; + } + return detail::createResultValueType( result, std::move( structureChains ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_get_display_properties2 === + + // wrapper function for command vkGetPhysicalDeviceDisplayProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayProperties2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t * pPropertyCount, + DisplayProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceDisplayProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayProperties2KHR.html + template <typename DisplayProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DisplayProperties2KHRAllocator::value_type, DisplayProperties2KHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type + PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayProperties2KHR && + "Function <vkGetPhysicalDeviceDisplayProperties2KHR> requires <VK_KHR_get_display_properties2>" ); +# endif + + std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( + d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() 
) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceDisplayProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayProperties2KHR.html + template <typename DisplayProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DisplayProperties2KHRAllocator::value_type, DisplayProperties2KHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type + PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayProperties2KHR && + "Function <vkGetPhysicalDeviceDisplayProperties2KHR> requires <VK_KHR_get_display_properties2>" ); +# endif + + std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties( displayProperties2KHRAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( + d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceDisplayPlaneProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPlaneProperties2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount, + DisplayPlaneProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceDisplayPlaneProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPlaneProperties2KHR.html + template <typename 
DisplayPlaneProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DisplayPlaneProperties2KHRAllocator::value_type, DisplayPlaneProperties2KHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type + PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR && + "Function <vkGetPhysicalDeviceDisplayPlaneProperties2KHR> requires <VK_KHR_get_display_properties2>" ); +# endif + + std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceDisplayPlaneProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDisplayPlaneProperties2KHR.html + template <typename DisplayPlaneProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DisplayPlaneProperties2KHRAllocator::value_type, DisplayPlaneProperties2KHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type + PhysicalDevice::getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR && + "Function <vkGetPhysicalDeviceDisplayPlaneProperties2KHR> requires <VK_KHR_get_display_properties2>" ); +# endif + + std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties( displayPlaneProperties2KHRAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + 
detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDisplayModeProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModeProperties2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( DisplayKHR display, + uint32_t * pPropertyCount, + DisplayModeProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetDisplayModeProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + static_cast<VkDisplayKHR>( display ), + pPropertyCount, + reinterpret_cast<VkDisplayModeProperties2KHR *>( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDisplayModeProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModeProperties2KHR.html + template <typename DisplayModeProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DisplayModeProperties2KHRAllocator::value_type, DisplayModeProperties2KHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type + PhysicalDevice::getDisplayModeProperties2KHR( DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function <vkGetDisplayModeProperties2KHR> requires <VK_KHR_get_display_properties2>" ); +# endif + + std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetDisplayModeProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModeProperties2KHR.html + template <typename DisplayModeProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename 
DisplayModeProperties2KHRAllocator::value_type, DisplayModeProperties2KHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type + PhysicalDevice::getDisplayModeProperties2KHR( DisplayKHR display, + DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function <vkGetDisplayModeProperties2KHR> requires <VK_KHR_get_display_properties2>" ); +# endif + + std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties( displayModeProperties2KHRAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetDisplayModeProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModeProperties2KHR.html + template <typename StructureChain, + typename StructureChainAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type + PhysicalDevice::getDisplayModeProperties2KHR( DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function <vkGetDisplayModeProperties2KHR> requires <VK_KHR_get_display_properties2>" ); +# endif + + std::vector<StructureChain, StructureChainAllocator> structureChains; + std::vector<DisplayModeProperties2KHR> properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + structureChains.resize( propertyCount ); + properties.resize( propertyCount ); + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + properties[i].pNext = structureChains[i].template get<DisplayModeProperties2KHR>().pNext; + } + result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, 
reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + structureChains.resize( propertyCount ); + } + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + structureChains[i].template get<DisplayModeProperties2KHR>() = properties[i]; + } + return detail::createResultValueType( result, std::move( structureChains ) ); + } + + // wrapper function for command vkGetDisplayModeProperties2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayModeProperties2KHR.html + template <typename StructureChain, + typename StructureChainAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type + PhysicalDevice::getDisplayModeProperties2KHR( DisplayKHR display, StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function <vkGetDisplayModeProperties2KHR> requires <VK_KHR_get_display_properties2>" ); +# endif + + std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator ); + std::vector<DisplayModeProperties2KHR> properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + structureChains.resize( propertyCount ); + properties.resize( propertyCount ); + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + properties[i].pNext = structureChains[i].template get<DisplayModeProperties2KHR>().pNext; + } + result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + structureChains.resize( propertyCount ); + } + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + structureChains[i].template get<DisplayModeProperties2KHR>() = properties[i]; + } + return detail::createResultValueType( result, std::move( structureChains ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDisplayPlaneCapabilities2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneCapabilities2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR * pDisplayPlaneInfo, + DisplayPlaneCapabilities2KHR * pCapabilities, + 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( pDisplayPlaneInfo ), + reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( pCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDisplayPlaneCapabilities2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDisplayPlaneCapabilities2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DisplayPlaneCapabilities2KHR>::type + PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneCapabilities2KHR && "Function <vkGetDisplayPlaneCapabilities2KHR> requires <VK_KHR_get_display_properties2>" ); +# endif + + DisplayPlaneCapabilities2KHR capabilities; + Result result = static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, + reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ), + reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); + + return detail::createResultValueType( result, std::move( capabilities ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + // wrapper function for command vkCreateIOSSurfaceMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIOSSurfaceMVK.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK * pCreateInfo, + const AllocationCallbacks * pAllocator, + SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateIOSSurfaceMVK( static_cast<VkInstance>( m_instance ), + reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateIOSSurfaceMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIOSSurfaceMVK.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<SurfaceKHR>::type + Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIOSSurfaceMVK && "Function <vkCreateIOSSurfaceMVK> requires <VK_MVK_ios_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( 
d.vkCreateIOSSurfaceMVK( m_instance, + reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" ); + + return detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateIOSSurfaceMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIOSSurfaceMVK.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR, Dispatch>>::type + Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIOSSurfaceMVK && "Function <vkCreateIOSSurfaceMVK> requires <VK_MVK_ios_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, + reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + // wrapper function for command vkCreateMacOSSurfaceMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMacOSSurfaceMVK.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK * pCreateInfo, + const AllocationCallbacks * pAllocator, + SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( static_cast<VkInstance>( m_instance ), + reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateMacOSSurfaceMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMacOSSurfaceMVK.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<SurfaceKHR>::type + Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( 
d.vkCreateMacOSSurfaceMVK && "Function <vkCreateMacOSSurfaceMVK> requires <VK_MVK_macos_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, + reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" ); + + return detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateMacOSSurfaceMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMacOSSurfaceMVK.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR, Dispatch>>::type Instance::createMacOSSurfaceMVKUnique( + const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateMacOSSurfaceMVK && "Function <vkCreateMacOSSurfaceMVK> requires <VK_MVK_macos_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, + reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + // wrapper function for command vkSetDebugUtilsObjectNameEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDebugUtilsObjectNameEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT * pNameInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkSetDebugUtilsObjectNameEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetDebugUtilsObjectNameEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDebugUtilsObjectNameEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetDebugUtilsObjectNameEXT && "Function 
<vkSetDebugUtilsObjectNameEXT> requires <VK_EXT_debug_utils>" ); +# endif + + Result result = static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" ); + + return detail::createResultValueType( result ); + } + + // wrapper function for command vkSetDebugUtilsObjectNameEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDebugUtilsObjectNameEXT.html + template <typename HandleType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::setDebugUtilsObjectNameEXT( HandleType const & handle, std::string const & name, Dispatch const & d ) const + { + VULKAN_HPP_STATIC_ASSERT( VULKAN_HPP_NAMESPACE::isVulkanHandleType<HandleType>::value, "HandleType must be a Vulkan handle type" ); + // It might be, that neither constructors, nor setters, nor designated initializers are available... need to explicitly set member by member + VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT nameInfo; + nameInfo.objectType = handle.objectType; + nameInfo.objectHandle = reinterpret_cast<uint64_t>( static_cast<typename HandleType::CType>( handle ) ); + nameInfo.pObjectName = name.c_str(); + return setDebugUtilsObjectNameEXT( nameInfo, d ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkSetDebugUtilsObjectTagEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDebugUtilsObjectTagEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT * pTagInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkSetDebugUtilsObjectTagEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetDebugUtilsObjectTagEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDebugUtilsObjectTagEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetDebugUtilsObjectTagEXT && "Function <vkSetDebugUtilsObjectTagEXT> requires <VK_EXT_debug_utils>" ); +# endif + + Result result = static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" ); + + return detail::createResultValueType( result ); + } + + // wrapper function for command vkSetDebugUtilsObjectTagEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDebugUtilsObjectTagEXT.html + template <typename HandleType, typename TagType, typename Dispatch, typename 
std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::setDebugUtilsObjectTagEXT( HandleType const & handle, uint64_t name, TagType const & tag, Dispatch const & d ) const + { + VULKAN_HPP_STATIC_ASSERT( VULKAN_HPP_NAMESPACE::isVulkanHandleType<HandleType>::value, "HandleType must be a Vulkan handle type" ); + // It might be, that neither constructors, nor setters, nor designated initializers are available... need to explicitly set member by member + VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT tagInfo; + tagInfo.objectType = handle.objectType; + tagInfo.objectHandle = reinterpret_cast<uint64_t>( static_cast<typename HandleType::CType>( handle ) ); + tagInfo.tagName = name; + tagInfo.tagSize = sizeof( TagType ); + tagInfo.pTag = &tag; + return setDebugUtilsObjectTagEXT( tagInfo, d ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkQueueBeginDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueBeginDebugUtilsLabelEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT * pLabelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkQueueBeginDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueBeginDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueBeginDebugUtilsLabelEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueBeginDebugUtilsLabelEXT && "Function <vkQueueBeginDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" ); +# endif + + d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkQueueEndDebugUtilsLabelEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueEndDebugUtilsLabelEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkQueueEndDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ) ); + } + + // wrapper function for command vkQueueInsertDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueInsertDebugUtilsLabelEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT * pLabelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
d.vkQueueInsertDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueInsertDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueInsertDebugUtilsLabelEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueInsertDebugUtilsLabelEXT && "Function <vkQueueInsertDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" ); +# endif + + d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBeginDebugUtilsLabelEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginDebugUtilsLabelEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT * pLabelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginDebugUtilsLabelEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginDebugUtilsLabelEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginDebugUtilsLabelEXT && "Function <vkCmdBeginDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" ); +# endif + + d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndDebugUtilsLabelEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndDebugUtilsLabelEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) ); + } + + // wrapper function for command vkCmdInsertDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdInsertDebugUtilsLabelEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT * pLabelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdInsertDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdInsertDebugUtilsLabelEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdInsertDebugUtilsLabelEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdInsertDebugUtilsLabelEXT && "Function <vkCmdInsertDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" ); +# endif + + d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateDebugUtilsMessengerEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDebugUtilsMessengerEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT * pCreateInfo, + const AllocationCallbacks * pAllocator, + DebugUtilsMessengerEXT * pMessenger, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( static_cast<VkInstance>( m_instance ), + reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkDebugUtilsMessengerEXT *>( pMessenger ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDebugUtilsMessengerEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDebugUtilsMessengerEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DebugUtilsMessengerEXT>::type Instance::createDebugUtilsMessengerEXT( + const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDebugUtilsMessengerEXT && "Function <vkCreateDebugUtilsMessengerEXT> requires <VK_EXT_debug_utils>" ); +# endif + + DebugUtilsMessengerEXT messenger; + Result result = static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, + reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" ); + + return detail::createResultValueType( result, std::move( messenger ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDebugUtilsMessengerEXT, see + 
// https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDebugUtilsMessengerEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DebugUtilsMessengerEXT, Dispatch>>::type + Instance::createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDebugUtilsMessengerEXT && "Function <vkCreateDebugUtilsMessengerEXT> requires <VK_EXT_debug_utils>" ); +# endif + + DebugUtilsMessengerEXT messenger; + Result result = static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, + reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<DebugUtilsMessengerEXT, Dispatch>( messenger, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDebugUtilsMessengerEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugUtilsMessengerEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( DebugUtilsMessengerEXT messenger, + const AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDebugUtilsMessengerEXT( static_cast<VkInstance>( m_instance ), + static_cast<VkDebugUtilsMessengerEXT>( messenger ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDebugUtilsMessengerEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugUtilsMessengerEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( DebugUtilsMessengerEXT messenger, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDebugUtilsMessengerEXT && "Function <vkDestroyDebugUtilsMessengerEXT> requires <VK_EXT_debug_utils>" ); +# endif + + d.vkDestroyDebugUtilsMessengerEXT( + m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDebugUtilsMessengerEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugUtilsMessengerEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + 
VULKAN_HPP_INLINE void + Instance::destroy( DebugUtilsMessengerEXT messenger, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDebugUtilsMessengerEXT( static_cast<VkInstance>( m_instance ), + static_cast<VkDebugUtilsMessengerEXT>( messenger ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDebugUtilsMessengerEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDebugUtilsMessengerEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Instance::destroy( DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDebugUtilsMessengerEXT && "Function <vkDestroyDebugUtilsMessengerEXT> requires <VK_EXT_debug_utils>" ); +# endif + + d.vkDestroyDebugUtilsMessengerEXT( + m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkSubmitDebugUtilsMessageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSubmitDebugUtilsMessageEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + DebugUtilsMessageTypeFlagsEXT messageTypes, + const DebugUtilsMessengerCallbackDataEXT * pCallbackData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkSubmitDebugUtilsMessageEXT( static_cast<VkInstance>( m_instance ), + static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), + static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), + reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( pCallbackData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSubmitDebugUtilsMessageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSubmitDebugUtilsMessageEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + DebugUtilsMessageTypeFlagsEXT messageTypes, + const DebugUtilsMessengerCallbackDataEXT & callbackData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSubmitDebugUtilsMessageEXT && "Function <vkSubmitDebugUtilsMessageEXT> requires <VK_EXT_debug_utils>" ); +# endif + + d.vkSubmitDebugUtilsMessageEXT( m_instance, + static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), + static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), + reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( 
VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + + // wrapper function for command vkGetAndroidHardwareBufferPropertiesANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAndroidHardwareBufferPropertiesANDROID.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer * buffer, + AndroidHardwareBufferPropertiesANDROID * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( + static_cast<VkDevice>( m_device ), buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetAndroidHardwareBufferPropertiesANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAndroidHardwareBufferPropertiesANDROID.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<AndroidHardwareBufferPropertiesANDROID>::type + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAndroidHardwareBufferPropertiesANDROID && + "Function <vkGetAndroidHardwareBufferPropertiesANDROID> requires <VK_ANDROID_external_memory_android_hardware_buffer>" ); +# endif + + AndroidHardwareBufferPropertiesANDROID properties; + Result result = static_cast<Result>( + d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + + return detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetAndroidHardwareBufferPropertiesANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAndroidHardwareBufferPropertiesANDROID.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAndroidHardwareBufferPropertiesANDROID && + "Function <vkGetAndroidHardwareBufferPropertiesANDROID> requires <VK_ANDROID_external_memory_android_hardware_buffer>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + AndroidHardwareBufferPropertiesANDROID & properties = structureChain.template get<AndroidHardwareBufferPropertiesANDROID>(); + Result result = static_cast<Result>( + d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + + return detail::createResultValueType( result, std::move( structureChain ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetMemoryAndroidHardwareBufferANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryAndroidHardwareBufferANDROID.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID * pInfo, + struct AHardwareBuffer ** pBuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryAndroidHardwareBufferANDROID, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryAndroidHardwareBufferANDROID.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<struct AHardwareBuffer *>::type + Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryAndroidHardwareBufferANDROID && + "Function <vkGetMemoryAndroidHardwareBufferANDROID> requires <VK_ANDROID_external_memory_android_hardware_buffer>" ); +# endif + + struct AHardwareBuffer * buffer; + Result result = static_cast<Result>( + d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" ); + + return detail::createResultValueType( result, std::move( buffer ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + // 
wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createExecutionGraphPipelinesAMDX( PipelineCache pipelineCache, + uint32_t createInfoCount, + const ExecutionGraphPipelineCreateInfoAMDX * pCreateInfos, + const AllocationCallbacks * pAllocator, + Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateExecutionGraphPipelinesAMDX( static_cast<VkDevice>( m_device ), + static_cast<VkPipelineCache>( pipelineCache ), + createInfoCount, + reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( pCreateInfos ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkPipeline *>( pPipelines ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + template <typename PipelineAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, Pipeline>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> + Device::createExecutionGraphPipelinesAMDX( PipelineCache pipelineCache, + ArrayProxy<const ExecutionGraphPipelineCreateInfoAMDX> const & createInfos, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); +# endif + + std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() ); + Result result = + static_cast<Result>( d.vkCreateExecutionGraphPipelinesAMDX( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<std::vector<Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); + } + + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + template <typename PipelineAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, Pipeline>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> + Device::createExecutionGraphPipelinesAMDX( PipelineCache pipelineCache, + ArrayProxy<const ExecutionGraphPipelineCreateInfoAMDX> const & createInfos, + 
Optional<const AllocationCallbacks> allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); +# endif + + std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); + Result result = + static_cast<Result>( d.vkCreateExecutionGraphPipelinesAMDX( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<std::vector<Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); + } + + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> + Device::createExecutionGraphPipelineAMDX( PipelineCache pipelineCache, + const ExecutionGraphPipelineCreateInfoAMDX & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); +# endif + + Pipeline pipeline; + Result result = static_cast<Result>( d.vkCreateExecutionGraphPipelinesAMDX( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + 1, + reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( &pipeline ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDX", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<Pipeline>( result, std::move( pipeline ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + template <typename Dispatch, + typename PipelineAllocator, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> + Device::createExecutionGraphPipelinesAMDXUnique( PipelineCache pipelineCache, + ArrayProxy<const ExecutionGraphPipelineCreateInfoAMDX> const & createInfos, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); +# endif + + std::vector<Pipeline> pipelines( createInfos.size() ); + Result result = + static_cast<Result>( d.vkCreateExecutionGraphPipelinesAMDX( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); + } + return ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + // wrapper function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + template <typename Dispatch, + typename PipelineAllocator, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> + Device::createExecutionGraphPipelinesAMDXUnique( PipelineCache pipelineCache, + ArrayProxy<const ExecutionGraphPipelineCreateInfoAMDX> const & createInfos, + Optional<const AllocationCallbacks> allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); +# endif + + std::vector<Pipeline> pipelines( createInfos.size() ); + Result result = + static_cast<Result>( d.vkCreateExecutionGraphPipelinesAMDX( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); + } + return ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + // wrapper 
function for command vkCreateExecutionGraphPipelinesAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExecutionGraphPipelinesAMDX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> + Device::createExecutionGraphPipelineAMDXUnique( PipelineCache pipelineCache, + const ExecutionGraphPipelineCreateInfoAMDX & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function <vkCreateExecutionGraphPipelinesAMDX> requires <VK_AMDX_shader_enqueue>" ); +# endif + + Pipeline pipeline; + Result result = static_cast<Result>( d.vkCreateExecutionGraphPipelinesAMDX( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + 1, + reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( &pipeline ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDXUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<UniqueHandle<Pipeline, Dispatch>>( + result, UniqueHandle<Pipeline, Dispatch>( pipeline, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetExecutionGraphPipelineScratchSizeAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExecutionGraphPipelineScratchSizeAMDX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getExecutionGraphPipelineScratchSizeAMDX( Pipeline executionGraph, + ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetExecutionGraphPipelineScratchSizeAMDX( static_cast<VkDevice>( m_device ), + static_cast<VkPipeline>( executionGraph ), + reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( pSizeInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetExecutionGraphPipelineScratchSizeAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExecutionGraphPipelineScratchSizeAMDX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<ExecutionGraphPipelineScratchSizeAMDX>::type + Device::getExecutionGraphPipelineScratchSizeAMDX( Pipeline executionGraph, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetExecutionGraphPipelineScratchSizeAMDX && + "Function <vkGetExecutionGraphPipelineScratchSizeAMDX> requires <VK_AMDX_shader_enqueue>" ); +# endif + + ExecutionGraphPipelineScratchSizeAMDX sizeInfo; + Result result = static_cast<Result>( d.vkGetExecutionGraphPipelineScratchSizeAMDX( + m_device, static_cast<VkPipeline>( 
executionGraph ), reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( &sizeInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineScratchSizeAMDX" ); + + return detail::createResultValueType( result, std::move( sizeInfo ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetExecutionGraphPipelineNodeIndexAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExecutionGraphPipelineNodeIndexAMDX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getExecutionGraphPipelineNodeIndexAMDX( Pipeline executionGraph, + const PipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, + uint32_t * pNodeIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetExecutionGraphPipelineNodeIndexAMDX( static_cast<VkDevice>( m_device ), + static_cast<VkPipeline>( executionGraph ), + reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( pNodeInfo ), + pNodeIndex ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetExecutionGraphPipelineNodeIndexAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExecutionGraphPipelineNodeIndexAMDX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type + Device::getExecutionGraphPipelineNodeIndexAMDX( Pipeline executionGraph, const PipelineShaderStageNodeCreateInfoAMDX & nodeInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetExecutionGraphPipelineNodeIndexAMDX && "Function <vkGetExecutionGraphPipelineNodeIndexAMDX> requires <VK_AMDX_shader_enqueue>" ); +# endif + + uint32_t nodeIndex; + Result result = static_cast<Result>( d.vkGetExecutionGraphPipelineNodeIndexAMDX( + m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( &nodeInfo ), &nodeIndex ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineNodeIndexAMDX" ); + + return detail::createResultValueType( result, std::move( nodeIndex ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdInitializeGraphScratchMemoryAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdInitializeGraphScratchMemoryAMDX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( Pipeline executionGraph, + DeviceAddress scratch, + DeviceSize scratchSize, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdInitializeGraphScratchMemoryAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkPipeline>( executionGraph ), + static_cast<VkDeviceAddress>( scratch ), + static_cast<VkDeviceSize>( scratchSize ) ); + } + + // wrapper function for command vkCmdDispatchGraphAMDX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchGraphAMDX.html + template 
<typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( DeviceAddress scratch, + DeviceSize scratchSize, + const DispatchGraphCountInfoAMDX * pCountInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchGraphAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkDeviceAddress>( scratch ), + static_cast<VkDeviceSize>( scratchSize ), + reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDispatchGraphAMDX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchGraphAMDX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( DeviceAddress scratch, + DeviceSize scratchSize, + const DispatchGraphCountInfoAMDX & countInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDispatchGraphAMDX && "Function <vkCmdDispatchGraphAMDX> requires <VK_AMDX_shader_enqueue>" ); +# endif + + d.vkCmdDispatchGraphAMDX( m_commandBuffer, + static_cast<VkDeviceAddress>( scratch ), + static_cast<VkDeviceSize>( scratchSize ), + reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDispatchGraphIndirectAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchGraphIndirectAMDX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( DeviceAddress scratch, + DeviceSize scratchSize, + const DispatchGraphCountInfoAMDX * pCountInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchGraphIndirectAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkDeviceAddress>( scratch ), + static_cast<VkDeviceSize>( scratchSize ), + reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDispatchGraphIndirectAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchGraphIndirectAMDX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( DeviceAddress scratch, + DeviceSize scratchSize, + const DispatchGraphCountInfoAMDX & countInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDispatchGraphIndirectAMDX && "Function <vkCmdDispatchGraphIndirectAMDX> requires <VK_AMDX_shader_enqueue>" ); +# endif + + d.vkCmdDispatchGraphIndirectAMDX( m_commandBuffer, + static_cast<VkDeviceAddress>( scratch ), + static_cast<VkDeviceSize>( scratchSize ), + reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper 
function for command vkCmdDispatchGraphIndirectCountAMDX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchGraphIndirectCountAMDX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( DeviceAddress scratch, + DeviceSize scratchSize, + DeviceAddress countInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchGraphIndirectCountAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkDeviceAddress>( scratch ), + static_cast<VkDeviceSize>( scratchSize ), + static_cast<VkDeviceAddress>( countInfo ) ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + + // wrapper function for command vkCmdSetSampleLocationsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleLocationsEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT * pSampleLocationsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetSampleLocationsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetSampleLocationsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleLocationsEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetSampleLocationsEXT && "Function <vkCmdSetSampleLocationsEXT> requires <VK_EXT_sample_locations>" ); +# endif + + d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceMultisamplePropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMultisamplePropertiesEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( SampleCountFlagBits samples, + MultisamplePropertiesEXT * pMultisampleProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceMultisamplePropertiesEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), + static_cast<VkSampleCountFlagBits>( samples ), + reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceMultisamplePropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceMultisamplePropertiesEXT.html + template <typename Dispatch, typename 
std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE MultisamplePropertiesEXT PhysicalDevice::getMultisamplePropertiesEXT( SampleCountFlagBits samples, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMultisamplePropertiesEXT && + "Function <vkGetPhysicalDeviceMultisamplePropertiesEXT> requires <VK_EXT_sample_locations>" ); +# endif + + MultisamplePropertiesEXT multisampleProperties; + d.vkGetPhysicalDeviceMultisamplePropertiesEXT( + m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) ); + + return multisampleProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_get_memory_requirements2 === + + // wrapper function for command vkGetImageMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 * pInfo, + MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageMemoryRequirements2KHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), + reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE MemoryRequirements2 Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2KHR && + "Function <vkGetImageMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); +# endif + + MemoryRequirements2 memoryRequirements; + d.vkGetImageMemoryRequirements2KHR( + m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetImageMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2KHR.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2KHR && + "Function <vkGetImageMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + MemoryRequirements2 & memoryRequirements = structureChain.template get<MemoryRequirements2>(); + d.vkGetImageMemoryRequirements2KHR( + m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetBufferMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 * pInfo, + MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetBufferMemoryRequirements2KHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), + reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE MemoryRequirements2 Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2KHR && + "Function <vkGetBufferMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); +# endif + + MemoryRequirements2 memoryRequirements; + d.vkGetBufferMemoryRequirements2KHR( + m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetBufferMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2KHR.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2KHR && + "Function <vkGetBufferMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + MemoryRequirements2 & memoryRequirements = structureChain.template get<MemoryRequirements2>(); + d.vkGetBufferMemoryRequirements2KHR( + m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageSparseMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSparseMemoryRequirements2KHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ), + pSparseMemoryRequirementCount, + reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageSparseMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2KHR.html + template < + typename SparseImageMemoryRequirements2Allocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, SparseImageMemoryRequirements2>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> + Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2KHR && + "Function <vkGetImageSparseMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); +# endif + + std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2KHR( + m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2KHR( m_device, + reinterpret_cast<const 
VkImageSparseMemoryRequirementsInfo2 *>( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + // wrapper function for command vkGetImageSparseMemoryRequirements2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2KHR.html + template < + typename SparseImageMemoryRequirements2Allocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, SparseImageMemoryRequirements2>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> + Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2KHR && + "Function <vkGetImageSparseMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" ); +# endif + + std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( sparseImageMemoryRequirements2Allocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2KHR( + m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2KHR( m_device, + reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_acceleration_structure === + + // wrapper function for command vkCreateAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAccelerationStructureKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createAccelerationStructureKHR( const AccelerationStructureCreateInfoKHR * pCreateInfo, + const AllocationCallbacks * pAllocator, + AccelerationStructureKHR * pAccelerationStructure, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateAccelerationStructureKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( 
pAllocator ), + reinterpret_cast<VkAccelerationStructureKHR *>( pAccelerationStructure ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAccelerationStructureKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<AccelerationStructureKHR>::type Device::createAccelerationStructureKHR( + const AccelerationStructureCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureKHR && "Function <vkCreateAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); +# endif + + AccelerationStructureKHR accelerationStructure; + Result result = static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device, + reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" ); + + return detail::createResultValueType( result, std::move( accelerationStructure ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAccelerationStructureKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<AccelerationStructureKHR, Dispatch>>::type + Device::createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureKHR && "Function <vkCreateAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); +# endif + + AccelerationStructureKHR accelerationStructure; + Result result = static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device, + reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<AccelerationStructureKHR, Dispatch>( accelerationStructure, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void 
Device::destroyAccelerationStructureKHR( AccelerationStructureKHR accelerationStructure, + const AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureKHR( static_cast<VkDevice>( m_device ), + static_cast<VkAccelerationStructureKHR>( accelerationStructure ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( AccelerationStructureKHR accelerationStructure, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureKHR && "Function <vkDestroyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); +# endif + + d.vkDestroyAccelerationStructureKHR( + m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( AccelerationStructureKHR accelerationStructure, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureKHR( static_cast<VkDevice>( m_device ), + static_cast<VkAccelerationStructureKHR>( accelerationStructure ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( AccelerationStructureKHR accelerationStructure, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureKHR && "Function <vkDestroyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); +# endif + + d.vkDestroyAccelerationStructureKHR( + m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBuildAccelerationStructuresKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructuresKHR.html + template <typename Dispatch, typename 
std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( uint32_t infoCount, + const AccelerationStructureBuildGeometryInfoKHR * pInfos, + const AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBuildAccelerationStructuresKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + infoCount, + reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ), + reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBuildAccelerationStructuresKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructuresKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::buildAccelerationStructuresKHR( ArrayProxy<const AccelerationStructureBuildGeometryInfoKHR> const & infos, + ArrayProxy<const AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBuildAccelerationStructuresKHR && "Function <vkCmdBuildAccelerationStructuresKHR> requires <VK_KHR_acceleration_structure>" ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); +# else + if ( infos.size() != pBuildRangeInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer, + infos.size(), + reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), + reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBuildAccelerationStructuresIndirectKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructuresIndirectKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( uint32_t infoCount, + const AccelerationStructureBuildGeometryInfoKHR * pInfos, + const DeviceAddress * pIndirectDeviceAddresses, + const uint32_t * pIndirectStrides, + const uint32_t * const * ppMaxPrimitiveCounts, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBuildAccelerationStructuresIndirectKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + infoCount, + reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ), + reinterpret_cast<const VkDeviceAddress *>( pIndirectDeviceAddresses ), + pIndirectStrides, + ppMaxPrimitiveCounts ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBuildAccelerationStructuresIndirectKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructuresIndirectKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( ArrayProxy<const AccelerationStructureBuildGeometryInfoKHR> const & infos, + ArrayProxy<const DeviceAddress> const & indirectDeviceAddresses, + ArrayProxy<const uint32_t> const & indirectStrides, + ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBuildAccelerationStructuresIndirectKHR && + "Function <vkCmdBuildAccelerationStructuresIndirectKHR> requires <VK_KHR_acceleration_structure>" ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() ); + VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() ); + VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() ); +# else + if ( infos.size() != indirectDeviceAddresses.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" ); + } + if ( infos.size() != indirectStrides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" ); + } + if ( infos.size() != pMaxPrimitiveCounts.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer, + infos.size(), + reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), + reinterpret_cast<const VkDeviceAddress *>( indirectDeviceAddresses.data() ), + indirectStrides.data(), + pMaxPrimitiveCounts.data() ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBuildAccelerationStructuresKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBuildAccelerationStructuresKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::buildAccelerationStructuresKHR( DeferredOperationKHR deferredOperation, + uint32_t infoCount, + const AccelerationStructureBuildGeometryInfoKHR * pInfos, + const AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkBuildAccelerationStructuresKHR( static_cast<VkDevice>( m_device ), + static_cast<VkDeferredOperationKHR>( deferredOperation ), + infoCount, + reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ), + reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBuildAccelerationStructuresKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBuildAccelerationStructuresKHR.html + template <typename Dispatch, typename 
std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::buildAccelerationStructuresKHR( DeferredOperationKHR deferredOperation, + ArrayProxy<const AccelerationStructureBuildGeometryInfoKHR> const & infos, + ArrayProxy<const AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBuildAccelerationStructuresKHR && "Function <vkBuildAccelerationStructuresKHR> requires <VK_KHR_acceleration_structure>" ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); +# else + if ( infos.size() != pBuildRangeInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + Result result = static_cast<Result>( + d.vkBuildAccelerationStructuresKHR( m_device, + static_cast<VkDeferredOperationKHR>( deferredOperation ), + infos.size(), + reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), + reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) ) ); + detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR", + { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); + + return static_cast<Result>( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyAccelerationStructureKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( DeferredOperationKHR deferredOperation, + const CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCopyAccelerationStructureKHR( static_cast<VkDevice>( m_device ), + static_cast<VkDeferredOperationKHR>( deferredOperation ), + reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyAccelerationStructureKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( DeferredOperationKHR deferredOperation, + const CopyAccelerationStructureInfoKHR & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyAccelerationStructureKHR && "Function <vkCopyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); +# endif + + Result result = static_cast<Result>( d.vkCopyAccelerationStructureKHR( + m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) ) ); + detail::resultCheck( result, + 
VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR", + { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); + + return static_cast<Result>( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyAccelerationStructureToMemoryKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyAccelerationStructureToMemoryKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR( DeferredOperationKHR deferredOperation, + const CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR( static_cast<VkDevice>( m_device ), + static_cast<VkDeferredOperationKHR>( deferredOperation ), + reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyAccelerationStructureToMemoryKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyAccelerationStructureToMemoryKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR( DeferredOperationKHR deferredOperation, + const CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyAccelerationStructureToMemoryKHR && + "Function <vkCopyAccelerationStructureToMemoryKHR> requires <VK_KHR_acceleration_structure>" ); +# endif + + Result result = static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR( + m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) ) ); + detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR", + { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); + + return static_cast<Result>( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyMemoryToAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToAccelerationStructureKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR( DeferredOperationKHR deferredOperation, + const CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR( static_cast<VkDevice>( m_device ), + static_cast<VkDeferredOperationKHR>( deferredOperation ), + reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyMemoryToAccelerationStructureKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToAccelerationStructureKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR( DeferredOperationKHR deferredOperation, + const CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyMemoryToAccelerationStructureKHR && + "Function <vkCopyMemoryToAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); +# endif + + Result result = static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR( + m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) ) ); + detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR", + { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); + + return static_cast<Result>( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkWriteAccelerationStructuresPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteAccelerationStructuresPropertiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, + const AccelerationStructureKHR * pAccelerationStructures, + QueryType queryType, + size_t dataSize, + void * pData, + size_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( static_cast<VkDevice>( m_device ), + accelerationStructureCount, + reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ), + static_cast<VkQueryType>( queryType ), + dataSize, + pData, + stride ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkWriteAccelerationStructuresPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteAccelerationStructuresPropertiesKHR.html + template <typename DataType, + typename DataTypeAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DataTypeAllocator::value_type, DataType>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type + Device::writeAccelerationStructuresPropertiesKHR( + ArrayProxy<const AccelerationStructureKHR> const & accelerationStructures, QueryType queryType, size_t dataSize, size_t stride, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWriteAccelerationStructuresPropertiesKHR && + "Function <vkWriteAccelerationStructuresPropertiesKHR> requires <VK_KHR_acceleration_structure>" ); +# endif + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); + Result result = 
+ static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, + accelerationStructures.size(), + reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), + static_cast<VkQueryType>( queryType ), + data.size() * sizeof( DataType ), + reinterpret_cast<void *>( data.data() ), + stride ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" ); + + return detail::createResultValueType( result, std::move( data ) ); + } + + // wrapper function for command vkWriteAccelerationStructuresPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteAccelerationStructuresPropertiesKHR.html + template <typename DataType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::writeAccelerationStructuresPropertyKHR( + ArrayProxy<const AccelerationStructureKHR> const & accelerationStructures, QueryType queryType, size_t stride, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWriteAccelerationStructuresPropertiesKHR && + "Function <vkWriteAccelerationStructuresPropertiesKHR> requires <VK_KHR_acceleration_structure>" ); +# endif + + DataType data; + Result result = + static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, + accelerationStructures.size(), + reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), + static_cast<VkQueryType>( queryType ), + sizeof( DataType ), + reinterpret_cast<void *>( &data ), + stride ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" ); + + return detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyAccelerationStructureKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyAccelerationStructureKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyAccelerationStructureKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyAccelerationStructureKHR && "Function <vkCmdCopyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); +# endif + + d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, 
reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyAccelerationStructureToMemoryKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyAccelerationStructureToMemoryKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyAccelerationStructureToMemoryKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyAccelerationStructureToMemoryKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyAccelerationStructureToMemoryKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyAccelerationStructureToMemoryKHR && + "Function <vkCmdCopyAccelerationStructureToMemoryKHR> requires <VK_KHR_acceleration_structure>" ); +# endif + + d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyMemoryToAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToAccelerationStructureKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyMemoryToAccelerationStructureKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyMemoryToAccelerationStructureKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToAccelerationStructureKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryToAccelerationStructureKHR && + "Function <vkCmdCopyMemoryToAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" ); +# endif + + d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const 
VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetAccelerationStructureDeviceAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureDeviceAddressKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const AccelerationStructureDeviceAddressInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<DeviceAddress>( d.vkGetAccelerationStructureDeviceAddressKHR( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetAccelerationStructureDeviceAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureDeviceAddressKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const AccelerationStructureDeviceAddressInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureDeviceAddressKHR && + "Function <vkGetAccelerationStructureDeviceAddressKHR> requires <VK_KHR_acceleration_structure>" ); +# endif + + VkDeviceAddress result = + d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) ); + + return static_cast<DeviceAddress>( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdWriteAccelerationStructuresPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteAccelerationStructuresPropertiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, + const AccelerationStructureKHR * pAccelerationStructures, + QueryType queryType, + QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteAccelerationStructuresPropertiesKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + accelerationStructureCount, + reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ), + static_cast<VkQueryType>( queryType ), + static_cast<VkQueryPool>( queryPool ), + firstQuery ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdWriteAccelerationStructuresPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteAccelerationStructuresPropertiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( ArrayProxy<const AccelerationStructureKHR> const & accelerationStructures, + QueryType queryType, + QueryPool queryPool, + uint32_t 
firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdWriteAccelerationStructuresPropertiesKHR && + "Function <vkCmdWriteAccelerationStructuresPropertiesKHR> requires <VK_KHR_acceleration_structure>" ); +# endif + + d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, + accelerationStructures.size(), + reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), + static_cast<VkQueryType>( queryType ), + static_cast<VkQueryPool>( queryPool ), + firstQuery ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceAccelerationStructureCompatibilityKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceAccelerationStructureCompatibilityKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionInfoKHR * pVersionInfo, + AccelerationStructureCompatibilityKHR * pCompatibility, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceAccelerationStructureCompatibilityKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( pVersionInfo ), + reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceAccelerationStructureCompatibilityKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceAccelerationStructureCompatibilityKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE AccelerationStructureCompatibilityKHR + Device::getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionInfoKHR & versionInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceAccelerationStructureCompatibilityKHR && + "Function <vkGetDeviceAccelerationStructureCompatibilityKHR> requires <VK_KHR_acceleration_structure>" ); +# endif + + AccelerationStructureCompatibilityKHR compatibility; + d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, + reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ), + reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) ); + + return compatibility; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetAccelerationStructureBuildSizesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureBuildSizesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getAccelerationStructureBuildSizesKHR( AccelerationStructureBuildTypeKHR buildType, + const AccelerationStructureBuildGeometryInfoKHR * pBuildInfo, + const uint32_t * pMaxPrimitiveCounts, + AccelerationStructureBuildSizesInfoKHR * pSizeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); + d.vkGetAccelerationStructureBuildSizesKHR( static_cast<VkDevice>( m_device ), + static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), + reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pBuildInfo ), + pMaxPrimitiveCounts, + reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( pSizeInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetAccelerationStructureBuildSizesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureBuildSizesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE AccelerationStructureBuildSizesInfoKHR + Device::getAccelerationStructureBuildSizesKHR( AccelerationStructureBuildTypeKHR buildType, + const AccelerationStructureBuildGeometryInfoKHR & buildInfo, + ArrayProxy<const uint32_t> const & maxPrimitiveCounts, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureBuildSizesKHR && + "Function <vkGetAccelerationStructureBuildSizesKHR> requires <VK_KHR_acceleration_structure>" ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount ); +# else + if ( maxPrimitiveCounts.size() != buildInfo.geometryCount ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + AccelerationStructureBuildSizesInfoKHR sizeInfo; + d.vkGetAccelerationStructureBuildSizesKHR( m_device, + static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), + reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ), + maxPrimitiveCounts.data(), + reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) ); + + return sizeInfo; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_ray_tracing_pipeline === + + // wrapper function for command vkCmdTraceRaysKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const StridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const StridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdTraceRaysKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ), + reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ), + reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ), + reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ), + width, + height, + depth ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdTraceRaysKHR, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const StridedDeviceAddressRegionKHR & missShaderBindingTable, + const StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const StridedDeviceAddressRegionKHR & callableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdTraceRaysKHR && "Function <vkCmdTraceRaysKHR> requires <VK_KHR_ray_tracing_pipeline>" ); +# endif + + d.vkCmdTraceRaysKHR( m_commandBuffer, + reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ), + reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ), + reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ), + reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ), + width, + height, + depth ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesKHR( DeferredOperationKHR deferredOperation, + PipelineCache pipelineCache, + uint32_t createInfoCount, + const RayTracingPipelineCreateInfoKHR * pCreateInfos, + const AllocationCallbacks * pAllocator, + Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( static_cast<VkDevice>( m_device ), + static_cast<VkDeferredOperationKHR>( deferredOperation ), + static_cast<VkPipelineCache>( pipelineCache ), + createInfoCount, + reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( pCreateInfos ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkPipeline *>( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + template <typename PipelineAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, Pipeline>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> + Device::createRayTracingPipelinesKHR( DeferredOperationKHR deferredOperation, + PipelineCache pipelineCache, + ArrayProxy<const RayTracingPipelineCreateInfoKHR> const & createInfos, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires 
<VK_KHR_ray_tracing_pipeline>" ); +# endif + + std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() ); + Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, + static_cast<VkDeferredOperationKHR>( deferredOperation ), + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", + { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<std::vector<Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); + } + + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + template <typename PipelineAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, Pipeline>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> + Device::createRayTracingPipelinesKHR( DeferredOperationKHR deferredOperation, + PipelineCache pipelineCache, + ArrayProxy<const RayTracingPipelineCreateInfoKHR> const & createInfos, + Optional<const AllocationCallbacks> allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); +# endif + + std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); + Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, + static_cast<VkDeferredOperationKHR>( deferredOperation ), + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", + { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<std::vector<Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); + } + + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createRayTracingPipelineKHR( DeferredOperationKHR deferredOperation, + PipelineCache pipelineCache, + const RayTracingPipelineCreateInfoKHR & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + 
VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); +# endif + + Pipeline pipeline; + Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, + static_cast<VkDeferredOperationKHR>( deferredOperation ), + static_cast<VkPipelineCache>( pipelineCache ), + 1, + reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( &pipeline ) ) ); + detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR", + { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<Pipeline>( result, std::move( pipeline ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + template <typename Dispatch, + typename PipelineAllocator, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> + Device::createRayTracingPipelinesKHRUnique( DeferredOperationKHR deferredOperation, + PipelineCache pipelineCache, + ArrayProxy<const RayTracingPipelineCreateInfoKHR> const & createInfos, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); +# endif + + std::vector<Pipeline> pipelines( createInfos.size() ); + Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, + static_cast<VkDeferredOperationKHR>( deferredOperation ), + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", + { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR, Result::ePipelineCompileRequiredEXT } ); + std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); + } + return ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + template <typename Dispatch, + typename PipelineAllocator, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<Pipeline, 
Dispatch>>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> + Device::createRayTracingPipelinesKHRUnique( DeferredOperationKHR deferredOperation, + PipelineCache pipelineCache, + ArrayProxy<const RayTracingPipelineCreateInfoKHR> const & createInfos, + Optional<const AllocationCallbacks> allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); +# endif + + std::vector<Pipeline> pipelines( createInfos.size() ); + Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, + static_cast<VkDeferredOperationKHR>( deferredOperation ), + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", + { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR, Result::ePipelineCompileRequiredEXT } ); + std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); + } + return ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + // wrapper function for command vkCreateRayTracingPipelinesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> + Device::createRayTracingPipelineKHRUnique( DeferredOperationKHR deferredOperation, + PipelineCache pipelineCache, + const RayTracingPipelineCreateInfoKHR & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function <vkCreateRayTracingPipelinesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); +# endif + + Pipeline pipeline; + Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, + static_cast<VkDeferredOperationKHR>( deferredOperation ), + static_cast<VkPipelineCache>( pipelineCache ), + 1, + reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( &pipeline ) ) ); + detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHRUnique", + { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR, 
Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<UniqueHandle<Pipeline, Dispatch>>( + result, UniqueHandle<Pipeline, Dispatch>( pipeline, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetRayTracingShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( + Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( + static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRayTracingShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesKHR.html + template <typename DataType, + typename DataTypeAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DataTypeAllocator::value_type, DataType>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type + Device::getRayTracingShaderGroupHandlesKHR( Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesKHR && + "Function <vkGetRayTracingShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" ); +# endif + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); + Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( + m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" ); + + return detail::createResultValueType( result, std::move( data ) ); + } + + // wrapper function for command vkGetRayTracingShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesKHR.html + template <typename DataType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type + Device::getRayTracingShaderGroupHandleKHR( Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesKHR && + "Function <vkGetRayTracingShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline> or 
<VK_NV_ray_tracing>" ); +# endif + + DataType data; + Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( + m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" ); + + return detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetRayTracingCaptureReplayShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingCaptureReplayShaderGroupHandlesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( + Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRayTracingCaptureReplayShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingCaptureReplayShaderGroupHandlesKHR.html + template <typename DataType, + typename DataTypeAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DataTypeAllocator::value_type, DataType>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type + Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( + Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR && + "Function <vkGetRayTracingCaptureReplayShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); +# endif + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); + Result result = static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); + + return detail::createResultValueType( result, std::move( data ) ); + } + + // wrapper function for command vkGetRayTracingCaptureReplayShaderGroupHandlesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingCaptureReplayShaderGroupHandlesKHR.html + template <typename DataType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type + Device::getRayTracingCaptureReplayShaderGroupHandleKHR( Pipeline pipeline, uint32_t 
firstGroup, uint32_t groupCount, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR && + "Function <vkGetRayTracingCaptureReplayShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline>" ); +# endif + + DataType data; + Result result = static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" ); + + return detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdTraceRaysIndirectKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysIndirectKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const StridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const StridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + DeviceAddress indirectDeviceAddress, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdTraceRaysIndirectKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ), + reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ), + reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ), + reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ), + static_cast<VkDeviceAddress>( indirectDeviceAddress ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdTraceRaysIndirectKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysIndirectKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const StridedDeviceAddressRegionKHR & missShaderBindingTable, + const StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const StridedDeviceAddressRegionKHR & callableShaderBindingTable, + DeviceAddress indirectDeviceAddress, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdTraceRaysIndirectKHR && "Function <vkCmdTraceRaysIndirectKHR> requires <VK_KHR_ray_tracing_pipeline>" ); +# endif + + d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, + reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ), + reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ), + reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ), + reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ), + static_cast<VkDeviceAddress>( indirectDeviceAddress 
) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetRayTracingShaderGroupStackSizeKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupStackSizeKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE DeviceSize Device::getRayTracingShaderGroupStackSizeKHR( Pipeline pipeline, + uint32_t group, + ShaderGroupShaderKHR groupShader, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<DeviceSize>( d.vkGetRayTracingShaderGroupStackSizeKHR( + static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) ); + } + + // wrapper function for command vkCmdSetRayTracingPipelineStackSizeKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRayTracingPipelineStackSizeKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRayTracingPipelineStackSizeKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), pipelineStackSize ); + } + + //=== VK_KHR_sampler_ycbcr_conversion === + + // wrapper function for command vkCreateSamplerYcbcrConversionKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversionKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + SamplerYcbcrConversion * pYcbcrConversion, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateSamplerYcbcrConversionKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversionKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversionKHR( + const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversionKHR && + "Function <vkCreateSamplerYcbcrConversionKHR> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" ); +# endif + + SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, + reinterpret_cast<const 
VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" ); + + return detail::createResultValueType( result, std::move( ycbcrConversion ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateSamplerYcbcrConversionKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversionKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SamplerYcbcrConversion, Dispatch>>::type + Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversionKHR && + "Function <vkCreateSamplerYcbcrConversionKHR> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" ); +# endif + + SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, + reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<SamplerYcbcrConversion, Dispatch>( ycbcrConversion, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroySamplerYcbcrConversionKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySamplerYcbcrConversionKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( SamplerYcbcrConversion ycbcrConversion, + const AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySamplerYcbcrConversionKHR( static_cast<VkDevice>( m_device ), + static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroySamplerYcbcrConversionKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySamplerYcbcrConversionKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( SamplerYcbcrConversion ycbcrConversion, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySamplerYcbcrConversionKHR && + 
"Function <vkDestroySamplerYcbcrConversionKHR> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" ); +# endif + + d.vkDestroySamplerYcbcrConversionKHR( + m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_bind_memory2 === + + // wrapper function for command vkBindBufferMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, + const BindBufferMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkBindBufferMemory2KHR( static_cast<VkDevice>( m_device ), bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindBufferMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::bindBufferMemory2KHR( ArrayProxy<const BindBufferMemoryInfo> const & bindInfos, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindBufferMemory2KHR && "Function <vkBindBufferMemory2KHR> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" ); +# endif + + Result result = + static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBindImageMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, + const BindImageMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkBindImageMemory2KHR( static_cast<VkDevice>( m_device ), bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindImageMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::bindImageMemory2KHR( ArrayProxy<const BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindImageMemory2KHR && "Function <vkBindImageMemory2KHR> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" ); +# endif + + Result result = + static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_image_drm_format_modifier === + + // wrapper function for command vkGetImageDrmFormatModifierPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageDrmFormatModifierPropertiesEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( Image image, + ImageDrmFormatModifierPropertiesEXT * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( + static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageDrmFormatModifierPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageDrmFormatModifierPropertiesEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<ImageDrmFormatModifierPropertiesEXT>::type + Device::getImageDrmFormatModifierPropertiesEXT( Image image, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageDrmFormatModifierPropertiesEXT && + "Function <vkGetImageDrmFormatModifierPropertiesEXT> requires <VK_EXT_image_drm_format_modifier>" ); +# endif + + ImageDrmFormatModifierPropertiesEXT properties; + Result result = static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( + m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" ); + + return detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_validation_cache === + + // wrapper function for command vkCreateValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateValidationCacheEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT * pCreateInfo, + const AllocationCallbacks * pAllocator, + ValidationCacheEXT * pValidationCache, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateValidationCacheEXT( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( 
pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkValidationCacheEXT *>( pValidationCache ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateValidationCacheEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<ValidationCacheEXT>::type + Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateValidationCacheEXT && "Function <vkCreateValidationCacheEXT> requires <VK_EXT_validation_cache>" ); +# endif + + ValidationCacheEXT validationCache; + Result result = static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, + reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" ); + + return detail::createResultValueType( result, std::move( validationCache ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateValidationCacheEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ValidationCacheEXT, Dispatch>>::type Device::createValidationCacheEXTUnique( + const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateValidationCacheEXT && "Function <vkCreateValidationCacheEXT> requires <VK_EXT_validation_cache>" ); +# endif + + ValidationCacheEXT validationCache; + Result result = static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, + reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<ValidationCacheEXT, Dispatch>( validationCache, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyValidationCacheEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( ValidationCacheEXT validationCache, + const AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyValidationCacheEXT( + static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyValidationCacheEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( ValidationCacheEXT validationCache, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyValidationCacheEXT && "Function <vkDestroyValidationCacheEXT> requires <VK_EXT_validation_cache>" ); +# endif + + d.vkDestroyValidationCacheEXT( + m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyValidationCacheEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( ValidationCacheEXT validationCache, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyValidationCacheEXT( + static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyValidationCacheEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyValidationCacheEXT && "Function <vkDestroyValidationCacheEXT> requires <VK_EXT_validation_cache>" ); +# endif + + d.vkDestroyValidationCacheEXT( + m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkMergeValidationCachesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMergeValidationCachesEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( ValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const ValidationCacheEXT * pSrcCaches, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkMergeValidationCachesEXT( 
static_cast<VkDevice>( m_device ), + static_cast<VkValidationCacheEXT>( dstCache ), + srcCacheCount, + reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkMergeValidationCachesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMergeValidationCachesEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::mergeValidationCachesEXT( ValidationCacheEXT dstCache, ArrayProxy<const ValidationCacheEXT> const & srcCaches, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkMergeValidationCachesEXT && "Function <vkMergeValidationCachesEXT> requires <VK_EXT_validation_cache>" ); +# endif + + Result result = static_cast<Result>( d.vkMergeValidationCachesEXT( + m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCaches.size(), reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetValidationCacheDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetValidationCacheDataEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getValidationCacheDataEXT( ValidationCacheEXT validationCache, size_t * pDataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkGetValidationCacheDataEXT( static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetValidationCacheDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetValidationCacheDataEXT.html + template <typename Uint8_tAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type + Device::getValidationCacheDataEXT( ValidationCacheEXT validationCache, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetValidationCacheDataEXT && "Function <vkGetValidationCacheDataEXT> requires <VK_EXT_validation_cache>" ); +# endif + + std::vector<uint8_t, Uint8_tAllocator> data; + size_t dataSize; + Result result; + do + { + result = static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast<Result>( + d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void 
*>( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return detail::createResultValueType( result, std::move( data ) ); + } + + // wrapper function for command vkGetValidationCacheDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetValidationCacheDataEXT.html + template <typename Uint8_tAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type + Device::getValidationCacheDataEXT( ValidationCacheEXT validationCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetValidationCacheDataEXT && "Function <vkGetValidationCacheDataEXT> requires <VK_EXT_validation_cache>" ); +# endif + + std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator ); + size_t dataSize; + Result result; + do + { + result = static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast<Result>( + d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_shading_rate_image === + + // wrapper function for command vkCmdBindShadingRateImageNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindShadingRateImageNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( ImageView imageView, ImageLayout imageLayout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindShadingRateImageNV( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) ); + } + + // wrapper function for command vkCmdSetViewportShadingRatePaletteNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportShadingRatePaletteNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, + uint32_t viewportCount, + const ShadingRatePaletteNV * pShadingRatePalettes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportShadingRatePaletteNV( + 
static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetViewportShadingRatePaletteNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportShadingRatePaletteNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, + ArrayProxy<const ShadingRatePaletteNV> const & shadingRatePalettes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetViewportShadingRatePaletteNV && "Function <vkCmdSetViewportShadingRatePaletteNV> requires <VK_NV_shading_rate_image>" ); +# endif + + d.vkCmdSetViewportShadingRatePaletteNV( + m_commandBuffer, firstViewport, shadingRatePalettes.size(), reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetCoarseSampleOrderNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoarseSampleOrderNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( CoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const CoarseSampleOrderCustomNV * pCustomSampleOrders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCoarseSampleOrderNV( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), + customSampleOrderCount, + reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( pCustomSampleOrders ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetCoarseSampleOrderNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoarseSampleOrderNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( CoarseSampleOrderTypeNV sampleOrderType, + ArrayProxy<const CoarseSampleOrderCustomNV> const & customSampleOrders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetCoarseSampleOrderNV && "Function <vkCmdSetCoarseSampleOrderNV> requires <VK_NV_shading_rate_image>" ); +# endif + + d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, + static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), + customSampleOrders.size(), + reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( customSampleOrders.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_ray_tracing === + + // wrapper function for command vkCreateAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAccelerationStructureNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::createAccelerationStructureNV( const AccelerationStructureCreateInfoNV * pCreateInfo, + const AllocationCallbacks * pAllocator, + AccelerationStructureNV * pAccelerationStructure, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateAccelerationStructureNV( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkAccelerationStructureNV *>( pAccelerationStructure ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAccelerationStructureNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<AccelerationStructureNV>::type Device::createAccelerationStructureNV( + const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureNV && "Function <vkCreateAccelerationStructureNV> requires <VK_NV_ray_tracing>" ); +# endif + + AccelerationStructureNV accelerationStructure; + Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, + reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" ); + + return detail::createResultValueType( result, std::move( accelerationStructure ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateAccelerationStructureNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<AccelerationStructureNV, Dispatch>>::type + Device::createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureNV && "Function <vkCreateAccelerationStructureNV> requires <VK_NV_ray_tracing>" ); +# endif + + AccelerationStructureNV accelerationStructure; + Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, + reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<AccelerationStructureNV, Dispatch>( accelerationStructure, 
detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( AccelerationStructureNV accelerationStructure, + const AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureNV( static_cast<VkDevice>( m_device ), + static_cast<VkAccelerationStructureNV>( accelerationStructure ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( AccelerationStructureNV accelerationStructure, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureNV && "Function <vkDestroyAccelerationStructureNV> requires <VK_NV_ray_tracing>" ); +# endif + + d.vkDestroyAccelerationStructureNV( + m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( AccelerationStructureNV accelerationStructure, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureNV( static_cast<VkDevice>( m_device ), + static_cast<VkAccelerationStructureNV>( accelerationStructure ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyAccelerationStructureNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( AccelerationStructureNV accelerationStructure, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureNV && "Function <vkDestroyAccelerationStructureNV> requires <VK_NV_ray_tracing>" ); +# endif + + d.vkDestroyAccelerationStructureNV( + 
m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetAccelerationStructureMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureMemoryRequirementsNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV * pInfo, + MemoryRequirements2KHR * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetAccelerationStructureMemoryRequirementsNV( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( pInfo ), + reinterpret_cast<VkMemoryRequirements2KHR *>( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetAccelerationStructureMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureMemoryRequirementsNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE MemoryRequirements2KHR Device::getAccelerationStructureMemoryRequirementsNV( + const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureMemoryRequirementsNV && + "Function <vkGetAccelerationStructureMemoryRequirementsNV> requires <VK_NV_ray_tracing>" ); +# endif + + MemoryRequirements2KHR memoryRequirements; + d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, + reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ), + reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetAccelerationStructureMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureMemoryRequirementsNV.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> + Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureMemoryRequirementsNV && + "Function <vkGetAccelerationStructureMemoryRequirementsNV> requires <VK_NV_ray_tracing>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + MemoryRequirements2KHR & memoryRequirements = structureChain.template get<MemoryRequirements2KHR>(); + d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, + reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ), + reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBindAccelerationStructureMemoryNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindAccelerationStructureMemoryNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, + const BindAccelerationStructureMemoryInfoNV * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( + static_cast<VkDevice>( m_device ), bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindAccelerationStructureMemoryNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindAccelerationStructureMemoryNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::bindAccelerationStructureMemoryNV( ArrayProxy<const BindAccelerationStructureMemoryInfoNV> const & bindInfos, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindAccelerationStructureMemoryNV && "Function <vkBindAccelerationStructureMemoryNV> requires <VK_NV_ray_tracing>" ); +# endif + + Result result = static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( + m_device, bindInfos.size(), reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBuildAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructureNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV * pInfo, + Buffer 
instanceData, + DeviceSize instanceOffset, + Bool32 update, + AccelerationStructureNV dst, + AccelerationStructureNV src, + Buffer scratch, + DeviceSize scratchOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBuildAccelerationStructureNV( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkAccelerationStructureInfoNV *>( pInfo ), + static_cast<VkBuffer>( instanceData ), + static_cast<VkDeviceSize>( instanceOffset ), + static_cast<VkBool32>( update ), + static_cast<VkAccelerationStructureNV>( dst ), + static_cast<VkAccelerationStructureNV>( src ), + static_cast<VkBuffer>( scratch ), + static_cast<VkDeviceSize>( scratchOffset ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBuildAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildAccelerationStructureNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, + Buffer instanceData, + DeviceSize instanceOffset, + Bool32 update, + AccelerationStructureNV dst, + AccelerationStructureNV src, + Buffer scratch, + DeviceSize scratchOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBuildAccelerationStructureNV && "Function <vkCmdBuildAccelerationStructureNV> requires <VK_NV_ray_tracing>" ); +# endif + + d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, + reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ), + static_cast<VkBuffer>( instanceData ), + static_cast<VkDeviceSize>( instanceOffset ), + static_cast<VkBool32>( update ), + static_cast<VkAccelerationStructureNV>( dst ), + static_cast<VkAccelerationStructureNV>( src ), + static_cast<VkBuffer>( scratch ), + static_cast<VkDeviceSize>( scratchOffset ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyAccelerationStructureNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyAccelerationStructureNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( AccelerationStructureNV dst, + AccelerationStructureNV src, + CopyAccelerationStructureModeKHR mode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyAccelerationStructureNV( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkAccelerationStructureNV>( dst ), + static_cast<VkAccelerationStructureNV>( src ), + static_cast<VkCopyAccelerationStructureModeKHR>( mode ) ); + } + + // wrapper function for command vkCmdTraceRaysNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( Buffer raygenShaderBindingTableBuffer, + DeviceSize raygenShaderBindingOffset, + Buffer missShaderBindingTableBuffer, + DeviceSize missShaderBindingOffset, + DeviceSize missShaderBindingStride, + Buffer hitShaderBindingTableBuffer, + DeviceSize 
hitShaderBindingOffset, + DeviceSize hitShaderBindingStride, + Buffer callableShaderBindingTableBuffer, + DeviceSize callableShaderBindingOffset, + DeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdTraceRaysNV( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkBuffer>( raygenShaderBindingTableBuffer ), + static_cast<VkDeviceSize>( raygenShaderBindingOffset ), + static_cast<VkBuffer>( missShaderBindingTableBuffer ), + static_cast<VkDeviceSize>( missShaderBindingOffset ), + static_cast<VkDeviceSize>( missShaderBindingStride ), + static_cast<VkBuffer>( hitShaderBindingTableBuffer ), + static_cast<VkDeviceSize>( hitShaderBindingOffset ), + static_cast<VkDeviceSize>( hitShaderBindingStride ), + static_cast<VkBuffer>( callableShaderBindingTableBuffer ), + static_cast<VkDeviceSize>( callableShaderBindingOffset ), + static_cast<VkDeviceSize>( callableShaderBindingStride ), + width, + height, + depth ); + } + + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( PipelineCache pipelineCache, + uint32_t createInfoCount, + const RayTracingPipelineCreateInfoNV * pCreateInfos, + const AllocationCallbacks * pAllocator, + Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( static_cast<VkDevice>( m_device ), + static_cast<VkPipelineCache>( pipelineCache ), + createInfoCount, + reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( pCreateInfos ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkPipeline *>( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + template <typename PipelineAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, Pipeline>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> + Device::createRayTracingPipelinesNV( PipelineCache pipelineCache, + ArrayProxy<const RayTracingPipelineCreateInfoNV> const & createInfos, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" ); +# endif + + std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() ); + Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), 
+ reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<std::vector<Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); + } + + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + template <typename PipelineAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, Pipeline>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> + Device::createRayTracingPipelinesNV( PipelineCache pipelineCache, + ArrayProxy<const RayTracingPipelineCreateInfoNV> const & createInfos, + Optional<const AllocationCallbacks> allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" ); +# endif + + std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); + Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<std::vector<Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); + } + + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createRayTracingPipelineNV( PipelineCache pipelineCache, + const RayTracingPipelineCreateInfoNV & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" ); +# endif + + Pipeline pipeline; + Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + 1, + reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( &pipeline ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<Pipeline>( result, std::move( pipeline ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command 
vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + template <typename Dispatch, + typename PipelineAllocator, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> + Device::createRayTracingPipelinesNVUnique( PipelineCache pipelineCache, + ArrayProxy<const RayTracingPipelineCreateInfoNV> const & createInfos, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" ); +# endif + + std::vector<Pipeline> pipelines( createInfos.size() ); + Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); + } + return ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + template <typename Dispatch, + typename PipelineAllocator, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> + Device::createRayTracingPipelinesNVUnique( PipelineCache pipelineCache, + ArrayProxy<const RayTracingPipelineCreateInfoNV> const & createInfos, + Optional<const AllocationCallbacks> allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" ); +# endif + + std::vector<Pipeline> pipelines( createInfos.size() ); + Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + 
reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); + } + return ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + // wrapper function for command vkCreateRayTracingPipelinesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRayTracingPipelinesNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createRayTracingPipelineNVUnique( + PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function <vkCreateRayTracingPipelinesNV> requires <VK_NV_ray_tracing>" ); +# endif + + Pipeline pipeline; + Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, + static_cast<VkPipelineCache>( pipelineCache ), + 1, + reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( &pipeline ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<UniqueHandle<Pipeline, Dispatch>>( + result, UniqueHandle<Pipeline, Dispatch>( pipeline, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetRayTracingShaderGroupHandlesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( + Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( + static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRayTracingShaderGroupHandlesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesNV.html + template <typename DataType, + typename DataTypeAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DataTypeAllocator::value_type, DataType>::value, int>::type, + 
typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type + Device::getRayTracingShaderGroupHandlesNV( Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesNV && + "Function <vkGetRayTracingShaderGroupHandlesNV> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" ); +# endif + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); + Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( + m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" ); + + return detail::createResultValueType( result, std::move( data ) ); + } + + // wrapper function for command vkGetRayTracingShaderGroupHandlesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRayTracingShaderGroupHandlesNV.html + template <typename DataType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type + Device::getRayTracingShaderGroupHandleNV( Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesNV && + "Function <vkGetRayTracingShaderGroupHandlesNV> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" ); +# endif + + DataType data; + Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( + m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" ); + + return detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetAccelerationStructureHandleNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureHandleNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( AccelerationStructureNV accelerationStructure, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetAccelerationStructureHandleNV( + static_cast<VkDevice>( m_device ), static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetAccelerationStructureHandleNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureHandleNV.html + template <typename DataType, + typename 
DataTypeAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DataTypeAllocator::value_type, DataType>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type + Device::getAccelerationStructureHandleNV( AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureHandleNV && "Function <vkGetAccelerationStructureHandleNV> requires <VK_NV_ray_tracing>" ); +# endif + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); + Result result = static_cast<Result>( d.vkGetAccelerationStructureHandleNV( + m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); + + return detail::createResultValueType( result, std::move( data ) ); + } + + // wrapper function for command vkGetAccelerationStructureHandleNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureHandleNV.html + template <typename DataType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type + Device::getAccelerationStructureHandleNV( AccelerationStructureNV accelerationStructure, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureHandleNV && "Function <vkGetAccelerationStructureHandleNV> requires <VK_NV_ray_tracing>" ); +# endif + + DataType data; + Result result = static_cast<Result>( d.vkGetAccelerationStructureHandleNV( + m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), sizeof( DataType ), reinterpret_cast<void *>( &data ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); + + return detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdWriteAccelerationStructuresPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteAccelerationStructuresPropertiesNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, + const AccelerationStructureNV * pAccelerationStructures, + QueryType queryType, + QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteAccelerationStructuresPropertiesNV( static_cast<VkCommandBuffer>( m_commandBuffer ), + accelerationStructureCount, + reinterpret_cast<const VkAccelerationStructureNV *>( pAccelerationStructures ), + static_cast<VkQueryType>( queryType ), + static_cast<VkQueryPool>( queryPool ), + firstQuery ); + } + +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdWriteAccelerationStructuresPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteAccelerationStructuresPropertiesNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( ArrayProxy<const AccelerationStructureNV> const & accelerationStructures, + QueryType queryType, + QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdWriteAccelerationStructuresPropertiesNV && + "Function <vkCmdWriteAccelerationStructuresPropertiesNV> requires <VK_NV_ray_tracing>" ); +# endif + + d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, + accelerationStructures.size(), + reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ), + static_cast<VkQueryType>( queryType ), + static_cast<VkQueryPool>( queryPool ), + firstQuery ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCompileDeferredNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCompileDeferredNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV( Pipeline pipeline, uint32_t shader, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCompileDeferredNV( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( pipeline ), shader ) ); + } +#else + // wrapper function for command vkCompileDeferredNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCompileDeferredNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::compileDeferredNV( Pipeline pipeline, uint32_t shader, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCompileDeferredNV && "Function <vkCompileDeferredNV> requires <VK_NV_ray_tracing>" ); +# endif + + Result result = static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" ); + + return detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_maintenance3 === + + // wrapper function for command vkGetDescriptorSetLayoutSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupportKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo * pCreateInfo, + DescriptorSetLayoutSupport * pSupport, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
d.vkGetDescriptorSetLayoutSupportKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), + reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDescriptorSetLayoutSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupportKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupportKHR && + "Function <vkGetDescriptorSetLayoutSupportKHR> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" ); +# endif + + DescriptorSetLayoutSupport support; + d.vkGetDescriptorSetLayoutSupportKHR( + m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) ); + + return support; + } + + // wrapper function for command vkGetDescriptorSetLayoutSupportKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupportKHR.html + template <typename X, typename Y, typename... Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupportKHR && + "Function <vkGetDescriptorSetLayoutSupportKHR> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + DescriptorSetLayoutSupport & support = structureChain.template get<DescriptorSetLayoutSupport>(); + d.vkGetDescriptorSetLayoutSupportKHR( + m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_draw_indirect_count === + + // wrapper function for command vkCmdDrawIndirectCountKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectCountKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( Buffer buffer, + DeviceSize offset, + Buffer countBuffer, + DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndirectCountKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkBuffer>( buffer ), + static_cast<VkDeviceSize>( offset ), + static_cast<VkBuffer>( countBuffer ), + static_cast<VkDeviceSize>( countBufferOffset ), + maxDrawCount, + stride ); + } + + // wrapper function for command vkCmdDrawIndexedIndirectCountKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexedIndirectCountKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( Buffer buffer, + DeviceSize offset, + Buffer countBuffer, + DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndexedIndirectCountKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkBuffer>( buffer ), + static_cast<VkDeviceSize>( offset ), + static_cast<VkBuffer>( countBuffer ), + static_cast<VkDeviceSize>( countBufferOffset ), + maxDrawCount, + stride ); + } + + //=== VK_EXT_external_memory_host === + + // wrapper function for command vkGetMemoryHostPointerPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryHostPointerPropertiesEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT( ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( static_cast<VkDevice>( m_device ), + static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), + pHostPointer, + reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryHostPointerPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryHostPointerPropertiesEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<MemoryHostPointerPropertiesEXT>::type + Device::getMemoryHostPointerPropertiesEXT( ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryHostPointerPropertiesEXT && "Function <vkGetMemoryHostPointerPropertiesEXT> requires <VK_EXT_external_memory_host>" ); +# endif + + MemoryHostPointerPropertiesEXT memoryHostPointerProperties; + Result result = + static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, + static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), + pHostPointer, + reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" ); + + return detail::createResultValueType( result, std::move( memoryHostPointerProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_AMD_buffer_marker === + + // wrapper function for command vkCmdWriteBufferMarkerAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteBufferMarkerAMD.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void 
CommandBuffer::writeBufferMarkerAMD( + PipelineStageFlagBits pipelineStage, Buffer dstBuffer, DeviceSize dstOffset, uint32_t marker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteBufferMarkerAMD( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkPipelineStageFlagBits>( pipelineStage ), + static_cast<VkBuffer>( dstBuffer ), + static_cast<VkDeviceSize>( dstOffset ), + marker ); + } + + // wrapper function for command vkCmdWriteBufferMarker2AMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteBufferMarker2AMD.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( + PipelineStageFlags2 stage, Buffer dstBuffer, DeviceSize dstOffset, uint32_t marker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteBufferMarker2AMD( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkPipelineStageFlags2>( stage ), + static_cast<VkBuffer>( dstBuffer ), + static_cast<VkDeviceSize>( dstOffset ), + marker ); + } + + //=== VK_EXT_calibrated_timestamps === + + // wrapper function for command vkGetPhysicalDeviceCalibrateableTimeDomainsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount, + TimeDomainKHR * pTimeDomains, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pTimeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( pTimeDomains ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceCalibrateableTimeDomainsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsEXT.html + template <typename TimeDomainKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename TimeDomainKHRAllocator::value_type, TimeDomainKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<TimeDomainKHR, TimeDomainKHRAllocator>>::type + PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT && + "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); +# endif + + std::vector<TimeDomainKHR, TimeDomainKHRAllocator> timeDomains; + uint32_t timeDomainCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast<Result>( + 
d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) + { + timeDomains.resize( timeDomainCount ); + } + return detail::createResultValueType( result, std::move( timeDomains ) ); + } + + // wrapper function for command vkGetPhysicalDeviceCalibrateableTimeDomainsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsEXT.html + template <typename TimeDomainKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename TimeDomainKHRAllocator::value_type, TimeDomainKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<TimeDomainKHR, TimeDomainKHRAllocator>>::type + PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT && + "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); +# endif + + std::vector<TimeDomainKHR, TimeDomainKHRAllocator> timeDomains( timeDomainKHRAllocator ); + uint32_t timeDomainCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast<Result>( + d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) + { + timeDomains.resize( timeDomainCount ); + } + return detail::createResultValueType( result, std::move( timeDomains ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetCalibratedTimestampsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, + const CalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetCalibratedTimestampsEXT( static_cast<VkDevice>( m_device ), + timestampCount, + reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( pTimestampInfos ), + pTimestamps, + pMaxDeviation ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command 
vkGetCalibratedTimestampsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsEXT.html + template <typename Uint64_tAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename Uint64_tAllocator::value_type, uint64_t>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type + Device::getCalibratedTimestampsEXT( ArrayProxy<const CalibratedTimestampInfoKHR> const & timestampInfos, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsEXT && + "Function <vkGetCalibratedTimestampsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); +# endif + + std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_( + std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); + std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first; + uint64_t & maxDeviation = data_.second; + Result result = static_cast<Result>( d.vkGetCalibratedTimestampsEXT( + m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); + + return detail::createResultValueType( result, std::move( data_ ) ); + } + + // wrapper function for command vkGetCalibratedTimestampsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsEXT.html + template <typename Uint64_tAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename Uint64_tAllocator::value_type, uint64_t>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type + Device::getCalibratedTimestampsEXT( ArrayProxy<const CalibratedTimestampInfoKHR> const & timestampInfos, + Uint64_tAllocator & uint64_tAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsEXT && + "Function <vkGetCalibratedTimestampsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); +# endif + + std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_( + std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) ); + std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first; + uint64_t & maxDeviation = data_.second; + Result result = static_cast<Result>( d.vkGetCalibratedTimestampsEXT( + m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); + + return detail::createResultValueType( result, std::move( data_ ) ); + } + + // wrapper function for command vkGetCalibratedTimestampsEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<uint64_t, uint64_t>>::type + Device::getCalibratedTimestampEXT( const CalibratedTimestampInfoKHR & timestampInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsEXT && + "Function <vkGetCalibratedTimestampsEXT> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); +# endif + + std::pair<uint64_t, uint64_t> data_; + uint64_t & timestamp = data_.first; + uint64_t & maxDeviation = data_.second; + Result result = static_cast<Result>( + d.vkGetCalibratedTimestampsEXT( m_device, 1, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( &timestampInfo ), &timestamp, &maxDeviation ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" ); + + return detail::createResultValueType( result, std::move( data_ ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_mesh_shader === + + // wrapper function for command vkCmdDrawMeshTasksNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawMeshTasksNV( static_cast<VkCommandBuffer>( m_commandBuffer ), taskCount, firstTask ); + } + + // wrapper function for command vkCmdDrawMeshTasksIndirectNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksIndirectNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( + Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawMeshTasksIndirectNV( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); + } + + // wrapper function for command vkCmdDrawMeshTasksIndirectCountNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksIndirectCountNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( Buffer buffer, + DeviceSize offset, + Buffer countBuffer, + DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawMeshTasksIndirectCountNV( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkBuffer>( buffer ), + static_cast<VkDeviceSize>( offset ), + static_cast<VkBuffer>( countBuffer ), + static_cast<VkDeviceSize>( countBufferOffset ), + maxDrawCount, + stride ); + } + + //=== VK_NV_scissor_exclusive === + + // wrapper function for command 
vkCmdSetExclusiveScissorEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetExclusiveScissorEnableNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const Bool32 * pExclusiveScissorEnables, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetExclusiveScissorEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), + firstExclusiveScissor, + exclusiveScissorCount, + reinterpret_cast<const VkBool32 *>( pExclusiveScissorEnables ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetExclusiveScissorEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetExclusiveScissorEnableNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor, + ArrayProxy<const Bool32> const & exclusiveScissorEnables, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetExclusiveScissorEnableNV && "Function <vkCmdSetExclusiveScissorEnableNV> requires <VK_NV_scissor_exclusive>" ); +# endif + + d.vkCmdSetExclusiveScissorEnableNV( + m_commandBuffer, firstExclusiveScissor, exclusiveScissorEnables.size(), reinterpret_cast<const VkBool32 *>( exclusiveScissorEnables.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetExclusiveScissorNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetExclusiveScissorNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const Rect2D * pExclusiveScissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetExclusiveScissorNV( + static_cast<VkCommandBuffer>( m_commandBuffer ), firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetExclusiveScissorNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetExclusiveScissorNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, + ArrayProxy<const Rect2D> const & exclusiveScissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetExclusiveScissorNV && "Function <vkCmdSetExclusiveScissorNV> requires <VK_NV_scissor_exclusive>" ); +# endif + + d.vkCmdSetExclusiveScissorNV( + m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size(), reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + 
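+  // --- Editor's note: illustrative usage sketch, not part of the generated header. ---
+  // A minimal, hypothetical call of the enhanced-mode exclusive-scissor wrappers above; the
+  // command buffer `cmd`, the Rect2D `scissor`, and the dispatcher `d` are assumed to exist in
+  // the caller's code, and the device is assumed to have VK_NV_scissor_exclusive enabled:
+  //
+  //   Rect2D scissor{ { 0, 0 }, { 640, 480 } };
+  //   Bool32 enable = VK_TRUE;
+  //   cmd.setExclusiveScissorEnableNV( 0, enable, d );  // ArrayProxy accepts a single Bool32
+  //   cmd.setExclusiveScissorNV( 0, scissor, d );       // ArrayProxy accepts a single Rect2D
+  //
+  // Both ArrayProxy overloads forward size() and data() to the underlying vkCmdSetExclusiveScissor*NV
+  // entry points, so a std::array or std::vector argument works the same way.
+ 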
//=== VK_NV_device_diagnostic_checkpoints === + + // wrapper function for command vkCmdSetCheckpointNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCheckpointNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCheckpointNV( static_cast<VkCommandBuffer>( m_commandBuffer ), pCheckpointMarker ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetCheckpointNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCheckpointNV.html + template <typename CheckpointMarkerType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetCheckpointNV && "Function <vkCmdSetCheckpointNV> requires <VK_NV_device_diagnostic_checkpoints>" ); +# endif + + d.vkCmdSetCheckpointNV( m_commandBuffer, reinterpret_cast<const void *>( &checkpointMarker ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetQueueCheckpointDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueueCheckpointDataNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Queue::getCheckpointDataNV( uint32_t * pCheckpointDataCount, CheckpointDataNV * pCheckpointData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetQueueCheckpointDataNV( static_cast<VkQueue>( m_queue ), pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetQueueCheckpointDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueueCheckpointDataNV.html + template <typename CheckpointDataNVAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename CheckpointDataNVAllocator::value_type, CheckpointDataNV>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointDataNV, CheckpointDataNVAllocator> Queue::getCheckpointDataNV( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointDataNV && "Function <vkGetQueueCheckpointDataNV> requires <VK_NV_device_diagnostic_checkpoints>" ); +# endif + + std::vector<CheckpointDataNV, CheckpointDataNVAllocator> checkpointData; + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) ); + + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() 
) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; + } + + // wrapper function for command vkGetQueueCheckpointDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueueCheckpointDataNV.html + template <typename CheckpointDataNVAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename CheckpointDataNVAllocator::value_type, CheckpointDataNV>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointDataNV, CheckpointDataNVAllocator> + Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointDataNV && "Function <vkGetQueueCheckpointDataNV> requires <VK_NV_device_diagnostic_checkpoints>" ); +# endif + + std::vector<CheckpointDataNV, CheckpointDataNVAllocator> checkpointData( checkpointDataNVAllocator ); + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) ); + + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() ) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetQueueCheckpointData2NV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueueCheckpointData2NV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount, CheckpointData2NV * pCheckpointData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetQueueCheckpointData2NV( static_cast<VkQueue>( m_queue ), pCheckpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( pCheckpointData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetQueueCheckpointData2NV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueueCheckpointData2NV.html + template <typename CheckpointData2NVAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename CheckpointData2NVAllocator::value_type, CheckpointData2NV>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointData2NV, CheckpointData2NVAllocator> Queue::getCheckpointData2NV( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointData2NV && "Function <vkGetQueueCheckpointData2NV> requires <VK_NV_device_diagnostic_checkpoints>" ); +# endif + + std::vector<CheckpointData2NV, CheckpointData2NVAllocator> checkpointData; + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, 
reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) ); + + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() ) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; + } + + // wrapper function for command vkGetQueueCheckpointData2NV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetQueueCheckpointData2NV.html + template <typename CheckpointData2NVAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename CheckpointData2NVAllocator::value_type, CheckpointData2NV>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointData2NV, CheckpointData2NVAllocator> + Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointData2NV && "Function <vkGetQueueCheckpointData2NV> requires <VK_NV_device_diagnostic_checkpoints>" ); +# endif + + std::vector<CheckpointData2NV, CheckpointData2NVAllocator> checkpointData( checkpointData2NVAllocator ); + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) ); + + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() ) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_timeline_semaphore === + + // wrapper function for command vkGetSemaphoreCounterValueKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreCounterValueKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( Semaphore semaphore, + uint64_t * pValue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( semaphore ), pValue ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSemaphoreCounterValueKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreCounterValueKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValueKHR( Semaphore semaphore, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSemaphoreCounterValueKHR && "Function <vkGetSemaphoreCounterValueKHR> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); +# endif + + uint64_t value; + Result result = static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value ) ); + detail::resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" ); + + return detail::createResultValueType( result, std::move( value ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkWaitSemaphoresKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitSemaphoresKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const SemaphoreWaitInfo * pWaitInfo, + uint64_t timeout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkWaitSemaphoresKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkWaitSemaphoresKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitSemaphoresKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWaitSemaphoresKHR && "Function <vkWaitSemaphoresKHR> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); +# endif + + Result result = static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR", { Result::eSuccess, Result::eTimeout } ); + + return static_cast<Result>( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkSignalSemaphoreKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSignalSemaphoreKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const SemaphoreSignalInfo * pSignalInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkSignalSemaphoreKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSignalSemaphoreKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSignalSemaphoreKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSignalSemaphoreKHR && "Function <vkSignalSemaphoreKHR> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" ); +# endif + + Result result = static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) ); + detail::resultCheck( 
result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_INTEL_performance_query === + + // wrapper function for command vkInitializePerformanceApiINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkInitializePerformanceApiINTEL.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL * pInitializeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkInitializePerformanceApiINTEL( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkInitializePerformanceApiINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkInitializePerformanceApiINTEL.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkInitializePerformanceApiINTEL && "Function <vkInitializePerformanceApiINTEL> requires <VK_INTEL_performance_query>" ); +# endif + + Result result = + static_cast<Result>( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUninitializePerformanceApiINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUninitializePerformanceApiINTEL.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUninitializePerformanceApiINTEL( static_cast<VkDevice>( m_device ) ); + } + + // wrapper function for command vkCmdSetPerformanceMarkerINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceMarkerINTEL.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL * pMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command 
vkCmdSetPerformanceMarkerINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceMarkerINTEL.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetPerformanceMarkerINTEL && "Function <vkCmdSetPerformanceMarkerINTEL> requires <VK_INTEL_performance_query>" ); +# endif + + Result result = + static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetPerformanceStreamMarkerINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceStreamMarkerINTEL.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL * pMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetPerformanceStreamMarkerINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceStreamMarkerINTEL.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetPerformanceStreamMarkerINTEL && "Function <vkCmdSetPerformanceStreamMarkerINTEL> requires <VK_INTEL_performance_query>" ); +# endif + + Result result = static_cast<Result>( + d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetPerformanceOverrideINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceOverrideINTEL.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL * 
pOverrideInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetPerformanceOverrideINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPerformanceOverrideINTEL.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetPerformanceOverrideINTEL && "Function <vkCmdSetPerformanceOverrideINTEL> requires <VK_INTEL_performance_query>" ); +# endif + + Result result = + static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkAcquirePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquirePerformanceConfigurationINTEL.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, + PerformanceConfigurationINTEL * pConfiguration, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( pAcquireInfo ), + reinterpret_cast<VkPerformanceConfigurationINTEL *>( pConfiguration ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquirePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquirePerformanceConfigurationINTEL.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<PerformanceConfigurationINTEL>::type + Device::acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquirePerformanceConfigurationINTEL && "Function <vkAcquirePerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" ); +# endif + + PerformanceConfigurationINTEL configuration; + Result result = + static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device, + reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ), + 
reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" ); + + return detail::createResultValueType( result, std::move( configuration ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkAcquirePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquirePerformanceConfigurationINTEL.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<PerformanceConfigurationINTEL, Dispatch>>::type + Device::acquirePerformanceConfigurationINTELUnique( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquirePerformanceConfigurationINTEL && "Function <vkAcquirePerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" ); +# endif + + PerformanceConfigurationINTEL configuration; + Result result = + static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device, + reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ), + reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<PerformanceConfigurationINTEL, Dispatch>( configuration, detail::ObjectRelease<Device, Dispatch>( *this, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkReleasePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleasePerformanceConfigurationINTEL.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( PerformanceConfigurationINTEL configuration, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkReleasePerformanceConfigurationINTEL( static_cast<VkDevice>( m_device ), static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); + } +#else + // wrapper function for command vkReleasePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleasePerformanceConfigurationINTEL.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::releasePerformanceConfigurationINTEL( PerformanceConfigurationINTEL configuration, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkReleasePerformanceConfigurationINTEL && "Function <vkReleasePerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" ); +# endif + + Result result = static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, 
static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" ); + + return detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkReleasePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleasePerformanceConfigurationINTEL.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release( PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkReleasePerformanceConfigurationINTEL( static_cast<VkDevice>( m_device ), static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); + } +#else + // wrapper function for command vkReleasePerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleasePerformanceConfigurationINTEL.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::release( PerformanceConfigurationINTEL configuration, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkReleasePerformanceConfigurationINTEL && "Function <vkReleasePerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" ); +# endif + + Result result = static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::release" ); + + return detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueSetPerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSetPerformanceConfigurationINTEL.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( PerformanceConfigurationINTEL configuration, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkQueueSetPerformanceConfigurationINTEL( static_cast<VkQueue>( m_queue ), static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); + } +#else + // wrapper function for command vkQueueSetPerformanceConfigurationINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSetPerformanceConfigurationINTEL.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Queue::setPerformanceConfigurationINTEL( PerformanceConfigurationINTEL configuration, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueSetPerformanceConfigurationINTEL && + "Function <vkQueueSetPerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" ); +# endif + + Result result = static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" ); + + return detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkGetPerformanceParameterINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPerformanceParameterINTEL.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( PerformanceParameterTypeINTEL parameter, + PerformanceValueINTEL * pValue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPerformanceParameterINTEL( + static_cast<VkDevice>( m_device ), static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( pValue ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPerformanceParameterINTEL, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPerformanceParameterINTEL.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<PerformanceValueINTEL>::type + Device::getPerformanceParameterINTEL( PerformanceParameterTypeINTEL parameter, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPerformanceParameterINTEL && "Function <vkGetPerformanceParameterINTEL> requires <VK_INTEL_performance_query>" ); +# endif + + PerformanceValueINTEL value; + Result result = static_cast<Result>( d.vkGetPerformanceParameterINTEL( + m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( &value ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" ); + + return detail::createResultValueType( result, std::move( value ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_AMD_display_native_hdr === + + // wrapper function for command vkSetLocalDimmingAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetLocalDimmingAMD.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( SwapchainKHR swapChain, Bool32 localDimmingEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkSetLocalDimmingAMD( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) ); + } + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + // wrapper function for command vkCreateImagePipeSurfaceFUCHSIA, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImagePipeSurfaceFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, + const AllocationCallbacks * pAllocator, + SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( static_cast<VkInstance>( m_instance ), + reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateImagePipeSurfaceFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImagePipeSurfaceFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<SurfaceKHR>::type Instance::createImagePipeSurfaceFUCHSIA( + const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateImagePipeSurfaceFUCHSIA && "Function <vkCreateImagePipeSurfaceFUCHSIA> requires <VK_FUCHSIA_imagepipe_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, + reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" ); + + return detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateImagePipeSurfaceFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateImagePipeSurfaceFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR, Dispatch>>::type Instance::createImagePipeSurfaceFUCHSIAUnique( + const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateImagePipeSurfaceFUCHSIA && "Function <vkCreateImagePipeSurfaceFUCHSIA> requires <VK_FUCHSIA_imagepipe_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, + reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique" ); + + return detail::createResultValueType( result, 
+ UniqueHandle<SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + // wrapper function for command vkCreateMetalSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMetalSurfaceEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT * pCreateInfo, + const AllocationCallbacks * pAllocator, + SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateMetalSurfaceEXT( static_cast<VkInstance>( m_instance ), + reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateMetalSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMetalSurfaceEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<SurfaceKHR>::type + Instance::createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateMetalSurfaceEXT && "Function <vkCreateMetalSurfaceEXT> requires <VK_EXT_metal_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, + reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" ); + + return detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateMetalSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMetalSurfaceEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR, Dispatch>>::type Instance::createMetalSurfaceEXTUnique( + const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateMetalSurfaceEXT && "Function <vkCreateMetalSurfaceEXT> requires <VK_EXT_metal_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, + reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + 
reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + + // wrapper function for command vkGetPhysicalDeviceFragmentShadingRatesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFragmentShadingRatesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getFragmentShadingRatesKHR( uint32_t * pFragmentShadingRateCount, + PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkGetPhysicalDeviceFragmentShadingRatesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + pFragmentShadingRateCount, + reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceFragmentShadingRatesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFragmentShadingRatesKHR.html + template < + typename PhysicalDeviceFragmentShadingRateKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PhysicalDeviceFragmentShadingRateKHRAllocator::value_type, PhysicalDeviceFragmentShadingRateKHR>::value, + int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type + PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFragmentShadingRatesKHR && + "Function <vkGetPhysicalDeviceFragmentShadingRatesKHR> requires <VK_KHR_fragment_shading_rate>" ); +# endif + + std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates; + uint32_t fragmentShadingRateCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && fragmentShadingRateCount ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( + m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); + VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); + if ( fragmentShadingRateCount < fragmentShadingRates.size() ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + } + return 
detail::createResultValueType( result, std::move( fragmentShadingRates ) ); + } + + // wrapper function for command vkGetPhysicalDeviceFragmentShadingRatesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceFragmentShadingRatesKHR.html + template < + typename PhysicalDeviceFragmentShadingRateKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PhysicalDeviceFragmentShadingRateKHRAllocator::value_type, PhysicalDeviceFragmentShadingRateKHR>::value, + int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type + PhysicalDevice::getFragmentShadingRatesKHR( PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFragmentShadingRatesKHR && + "Function <vkGetPhysicalDeviceFragmentShadingRatesKHR> requires <VK_KHR_fragment_shading_rate>" ); +# endif + + std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates( + physicalDeviceFragmentShadingRateKHRAllocator ); + uint32_t fragmentShadingRateCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && fragmentShadingRateCount ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( + m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); + VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); + if ( fragmentShadingRateCount < fragmentShadingRates.size() ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + } + return detail::createResultValueType( result, std::move( fragmentShadingRates ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetFragmentShadingRateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFragmentShadingRateKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const Extent2D * pFragmentSize, + const FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetFragmentShadingRateKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkExtent2D *>( pFragmentSize ), + reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetFragmentShadingRateKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFragmentShadingRateKHR.html + template <typename Dispatch, typename 
std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const Extent2D & fragmentSize, + const FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetFragmentShadingRateKHR && "Function <vkCmdSetFragmentShadingRateKHR> requires <VK_KHR_fragment_shading_rate>" ); +# endif + + d.vkCmdSetFragmentShadingRateKHR( + m_commandBuffer, reinterpret_cast<const VkExtent2D *>( &fragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_dynamic_rendering_local_read === + + // wrapper function for command vkCmdSetRenderingAttachmentLocationsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingAttachmentLocationsKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocationsKHR( const RenderingAttachmentLocationInfo * pLocationInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRenderingAttachmentLocationsKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkRenderingAttachmentLocationInfo *>( pLocationInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetRenderingAttachmentLocationsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingAttachmentLocationsKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocationsKHR( const RenderingAttachmentLocationInfo & locationInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetRenderingAttachmentLocationsKHR && + "Function <vkCmdSetRenderingAttachmentLocationsKHR> requires <VK_KHR_dynamic_rendering_local_read> or <VK_VERSION_1_4>" ); +# endif + + d.vkCmdSetRenderingAttachmentLocationsKHR( m_commandBuffer, reinterpret_cast<const VkRenderingAttachmentLocationInfo *>( &locationInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetRenderingInputAttachmentIndicesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingInputAttachmentIndicesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setRenderingInputAttachmentIndicesKHR( const RenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRenderingInputAttachmentIndicesKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkRenderingInputAttachmentIndexInfo *>( pInputAttachmentIndexInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetRenderingInputAttachmentIndicesKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRenderingInputAttachmentIndicesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setRenderingInputAttachmentIndicesKHR( const RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetRenderingInputAttachmentIndicesKHR && + "Function <vkCmdSetRenderingInputAttachmentIndicesKHR> requires <VK_KHR_dynamic_rendering_local_read> or <VK_VERSION_1_4>" ); +# endif + + d.vkCmdSetRenderingInputAttachmentIndicesKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInputAttachmentIndexInfo *>( &inputAttachmentIndexInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_buffer_device_address === + + // wrapper function for command vkGetBufferDeviceAddressEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddressEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<DeviceAddress>( + d.vkGetBufferDeviceAddressEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferDeviceAddressEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddressEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferDeviceAddressEXT && + "Function <vkGetBufferDeviceAddressEXT> requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); +# endif + + VkDeviceAddress result = d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); + + return static_cast<DeviceAddress>( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_tooling_info === + + // wrapper function for command vkGetPhysicalDeviceToolPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceToolPropertiesEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t * pToolCount, + PhysicalDeviceToolProperties * pToolProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // 
wrapper function for command vkGetPhysicalDeviceToolPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceToolPropertiesEXT.html + template <typename PhysicalDeviceToolPropertiesAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PhysicalDeviceToolPropertiesAllocator::value_type, PhysicalDeviceToolProperties>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type + PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolPropertiesEXT && + "Function <vkGetPhysicalDeviceToolPropertiesEXT> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" ); +# endif + + std::vector<PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties; + uint32_t toolCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && toolCount ) + { + toolProperties.resize( toolCount ); + result = static_cast<Result>( + d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); + VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); + if ( toolCount < toolProperties.size() ) + { + toolProperties.resize( toolCount ); + } + return detail::createResultValueType( result, std::move( toolProperties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceToolPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceToolPropertiesEXT.html + template <typename PhysicalDeviceToolPropertiesAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PhysicalDeviceToolPropertiesAllocator::value_type, PhysicalDeviceToolProperties>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type + PhysicalDevice::getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolPropertiesEXT && + "Function <vkGetPhysicalDeviceToolPropertiesEXT> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" ); +# endif + + std::vector<PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties( physicalDeviceToolPropertiesAllocator ); + uint32_t toolCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && toolCount ) + { + toolProperties.resize( toolCount ); + result = static_cast<Result>( + d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, 
reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); + VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); + if ( toolCount < toolProperties.size() ) + { + toolProperties.resize( toolCount ); + } + return detail::createResultValueType( result, std::move( toolProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_present_wait === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkWaitForPresentKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForPresentKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::waitForPresentKHR( SwapchainKHR swapchain, uint64_t presentId, uint64_t timeout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkWaitForPresentKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) ); + } +#else + // wrapper function for command vkWaitForPresentKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForPresentKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::waitForPresentKHR( SwapchainKHR swapchain, uint64_t presentId, uint64_t timeout, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWaitForPresentKHR && "Function <vkWaitForPresentKHR> requires <VK_KHR_present_wait>" ); +# endif + + Result result = static_cast<Result>( d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) ); + +# if defined( VULKAN_HPP_HANDLE_ERROR_OUT_OF_DATE_AS_SUCCESS ) + detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresentKHR", + { Result::eSuccess, Result::eTimeout, Result::eSuboptimalKHR, Result::eErrorOutOfDateKHR } ); +# else + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresentKHR", { Result::eSuccess, Result::eTimeout, Result::eSuboptimalKHR } ); +# endif + + return static_cast<Result>( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_cooperative_matrix === + + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixPropertiesNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV( uint32_t * pPropertyCount, + CooperativeMatrixPropertiesNV * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command 
vkGetPhysicalDeviceCooperativeMatrixPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixPropertiesNV.html + template <typename CooperativeMatrixPropertiesNVAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename CooperativeMatrixPropertiesNVAllocator::value_type, CooperativeMatrixPropertiesNV>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type + PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV && + "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesNV> requires <VK_NV_cooperative_matrix>" ); +# endif + + std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixPropertiesNV.html + template <typename CooperativeMatrixPropertiesNVAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename CooperativeMatrixPropertiesNVAllocator::value_type, CooperativeMatrixPropertiesNV>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type + PhysicalDevice::getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV && + "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesNV> requires <VK_NV_cooperative_matrix>" ); +# endif + + std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties( cooperativeMatrixPropertiesNVAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( 
propertyCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_coverage_reduction_mode === + + // wrapper function for command vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( + uint32_t * pCombinationCount, FramebufferMixedSamplesCombinationNV * pCombinations, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV.html + template < + typename FramebufferMixedSamplesCombinationNVAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename FramebufferMixedSamplesCombinationNVAllocator::value_type, FramebufferMixedSamplesCombinationNV>::value, + int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type + PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV && + "Function <vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV> requires <VK_NV_coverage_reduction_mode>" ); +# endif + + std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations; + uint32_t combinationCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && combinationCount ) + { + combinations.resize( combinationCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) ); + } + } while ( result == 
Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); + VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); + if ( combinationCount < combinations.size() ) + { + combinations.resize( combinationCount ); + } + return detail::createResultValueType( result, std::move( combinations ) ); + } + + // wrapper function for command vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV.html + template < + typename FramebufferMixedSamplesCombinationNVAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename FramebufferMixedSamplesCombinationNVAllocator::value_type, FramebufferMixedSamplesCombinationNV>::value, + int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type + PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( + FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV && + "Function <vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV> requires <VK_NV_coverage_reduction_mode>" ); +# endif + + std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations( + framebufferMixedSamplesCombinationNVAllocator ); + uint32_t combinationCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && combinationCount ) + { + combinations.resize( combinationCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); + VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); + if ( combinationCount < combinations.size() ) + { + combinations.resize( combinationCount ); + } + return detail::createResultValueType( result, std::move( combinations ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + // wrapper function for command vkGetPhysicalDeviceSurfacePresentModes2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfacePresentModes2EXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pPresentModeCount, + PresentModeKHR * pPresentModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), + pPresentModeCount, + reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceSurfacePresentModes2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfacePresentModes2EXT.html + template <typename PresentModeKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PresentModeKHRAllocator::value_type, PresentModeKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type + PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModes2EXT && + "Function <vkGetPhysicalDeviceSurfacePresentModes2EXT> requires <VK_EXT_full_screen_exclusive>" ); +# endif + + std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes; + uint32_t presentModeCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( + m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), + &presentModeCount, + reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) + { + presentModes.resize( presentModeCount ); + } + return detail::createResultValueType( result, std::move( presentModes ) ); + } + + // wrapper function for command vkGetPhysicalDeviceSurfacePresentModes2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceSurfacePresentModes2EXT.html + template <typename PresentModeKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PresentModeKHRAllocator::value_type, PresentModeKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type + PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + PresentModeKHRAllocator & presentModeKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModes2EXT && + "Function <vkGetPhysicalDeviceSurfacePresentModes2EXT> requires 
<VK_EXT_full_screen_exclusive>" ); +# endif + + std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator ); + uint32_t presentModeCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( + m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), + &presentModeCount, + reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) + { + presentModes.resize( presentModeCount ); + } + return detail::createResultValueType( result, std::move( presentModes ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquireFullScreenExclusiveModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireFullScreenExclusiveModeEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( SwapchainKHR swapchain, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ) ) ); + } +# else + // wrapper function for command vkAcquireFullScreenExclusiveModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireFullScreenExclusiveModeEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::acquireFullScreenExclusiveModeEXT( SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquireFullScreenExclusiveModeEXT && "Function <vkAcquireFullScreenExclusiveModeEXT> requires <VK_EXT_full_screen_exclusive>" ); +# endif + + Result result = static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" ); + + return detail::createResultValueType( result ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkReleaseFullScreenExclusiveModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseFullScreenExclusiveModeEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( SwapchainKHR swapchain, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ) ) ); + } +# else + // wrapper function for command vkReleaseFullScreenExclusiveModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseFullScreenExclusiveModeEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::releaseFullScreenExclusiveModeEXT( SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkReleaseFullScreenExclusiveModeEXT && "Function <vkReleaseFullScreenExclusiveModeEXT> requires <VK_EXT_full_screen_exclusive>" ); +# endif + + Result result = static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" ); + + return detail::createResultValueType( result ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkGetDeviceGroupSurfacePresentModes2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupSurfacePresentModes2EXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + DeviceGroupPresentModeFlagsKHR * pModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), + reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceGroupSurfacePresentModes2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupSurfacePresentModes2EXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DeviceGroupPresentModeFlagsKHR>::type + Device::getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceGroupSurfacePresentModes2EXT && + "Function <vkGetDeviceGroupSurfacePresentModes2EXT> requires <VK_EXT_full_screen_exclusive>" ); +# endif + + DeviceGroupPresentModeFlagsKHR modes; + Result result = static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT( + m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" ); + + return detail::createResultValueType( result, std::move( modes ) ); + 
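+    // Editorial note, not generator output: a minimal usage sketch for the VK_EXT_full_screen_exclusive
+    // wrappers defined above, assuming a valid vk::PhysicalDevice `physicalDevice`, vk::Device `device`,
+    // vk::SwapchainKHR `swapchain`, and a vk::PhysicalDeviceSurfaceInfo2KHR `surfaceInfo` whose pNext chain
+    // carries a vk::SurfaceFullScreenExclusiveInfoEXT (all names are illustrative):
+    //
+    //   auto presentModes = physicalDevice.getSurfacePresentModes2EXT( surfaceInfo );  // enumerate supported modes
+    //   device.acquireFullScreenExclusiveModeEXT( swapchain );                         // enter exclusive full-screen mode
+    //   // ... present frames ...
+    //   device.releaseFullScreenExclusiveModeEXT( swapchain );                         // leave exclusive full-screen mode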
} +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + + // wrapper function for command vkCreateHeadlessSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateHeadlessSurfaceEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT * pCreateInfo, + const AllocationCallbacks * pAllocator, + SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( static_cast<VkInstance>( m_instance ), + reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateHeadlessSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateHeadlessSurfaceEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<SurfaceKHR>::type Instance::createHeadlessSurfaceEXT( + const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateHeadlessSurfaceEXT && "Function <vkCreateHeadlessSurfaceEXT> requires <VK_EXT_headless_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, + reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" ); + + return detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateHeadlessSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateHeadlessSurfaceEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR, Dispatch>>::type Instance::createHeadlessSurfaceEXTUnique( + const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateHeadlessSurfaceEXT && "Function <vkCreateHeadlessSurfaceEXT> requires <VK_EXT_headless_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, + reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Instance::createHeadlessSurfaceEXTUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_buffer_device_address === + + // wrapper function for command vkGetBufferDeviceAddressKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddressKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<DeviceAddress>( + d.vkGetBufferDeviceAddressKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferDeviceAddressKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddressKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferDeviceAddressKHR && + "Function <vkGetBufferDeviceAddressKHR> requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); +# endif + + VkDeviceAddress result = d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); + + return static_cast<DeviceAddress>( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetBufferOpaqueCaptureAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureAddressKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetBufferOpaqueCaptureAddressKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferOpaqueCaptureAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureAddressKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferOpaqueCaptureAddressKHR && + "Function <vkGetBufferOpaqueCaptureAddressKHR> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); +# endif + + uint64_t result = 
d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); + + return result; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceMemoryOpaqueCaptureAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryOpaqueCaptureAddressKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfo * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceMemoryOpaqueCaptureAddressKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryOpaqueCaptureAddressKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceMemoryOpaqueCaptureAddressKHR && + "Function <vkGetDeviceMemoryOpaqueCaptureAddressKHR> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" ); +# endif + + uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) ); + + return result; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_line_rasterization === + + // wrapper function for command vkCmdSetLineStippleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineStippleEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLineStippleEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); + } + + //=== VK_EXT_host_query_reset === + + // wrapper function for command vkResetQueryPoolEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetQueryPoolEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::resetQueryPoolEXT( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkResetQueryPoolEXT( static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount ); + } + + //=== VK_EXT_extended_dynamic_state === + + // wrapper function for command vkCmdSetCullModeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCullModeEXT.html + template <typename Dispatch, typename 
std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCullModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) ); + } + + // wrapper function for command vkCmdSetFrontFaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFrontFaceEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetFrontFaceEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) ); + } + + // wrapper function for command vkCmdSetPrimitiveTopologyEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPrimitiveTopologyEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( PrimitiveTopology primitiveTopology, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetPrimitiveTopologyEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) ); + } + + // wrapper function for command vkCmdSetViewportWithCountEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWithCountEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount, const Viewport * pViewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportWithCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetViewportWithCountEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWithCountEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( ArrayProxy<const Viewport> const & viewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetViewportWithCountEXT && + "Function <vkCmdSetViewportWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" ); +# endif + + d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetScissorWithCountEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissorWithCountEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void 
CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetScissorWithCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetScissorWithCountEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissorWithCountEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( ArrayProxy<const Rect2D> const & scissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetScissorWithCountEXT && + "Function <vkCmdSetScissorWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" ); +# endif + + d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindVertexBuffers2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers2EXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, + uint32_t bindingCount, + const Buffer * pBuffers, + const DeviceSize * pOffsets, + const DeviceSize * pSizes, + const DeviceSize * pStrides, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindVertexBuffers2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + firstBinding, + bindingCount, + reinterpret_cast<const VkBuffer *>( pBuffers ), + reinterpret_cast<const VkDeviceSize *>( pOffsets ), + reinterpret_cast<const VkDeviceSize *>( pSizes ), + reinterpret_cast<const VkDeviceSize *>( pStrides ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindVertexBuffers2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers2EXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, + ArrayProxy<const Buffer> const & buffers, + ArrayProxy<const DeviceSize> const & offsets, + ArrayProxy<const DeviceSize> const & sizes, + ArrayProxy<const DeviceSize> const & strides, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindVertexBuffers2EXT && + "Function <vkCmdBindVertexBuffers2EXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); + VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( 
VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" ); + } + if ( !strides.empty() && buffers.size() != strides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, + firstBinding, + buffers.size(), + reinterpret_cast<const VkBuffer *>( buffers.data() ), + reinterpret_cast<const VkDeviceSize *>( offsets.data() ), + reinterpret_cast<const VkDeviceSize *>( sizes.data() ), + reinterpret_cast<const VkDeviceSize *>( strides.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetDepthTestEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthTestEnableEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) ); + } + + // wrapper function for command vkCmdSetDepthWriteEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthWriteEnableEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthWriteEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) ); + } + + // wrapper function for command vkCmdSetDepthCompareOpEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthCompareOpEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthCompareOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) ); + } + + // wrapper function for command vkCmdSetDepthBoundsTestEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBoundsTestEnableEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( Bool32 depthBoundsTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBoundsTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) ); + } + + // wrapper function for command vkCmdSetStencilTestEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilTestEnableEXT.html + template <typename 
Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) ); + } + + // wrapper function for command vkCmdSetStencilOpEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilOpEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( + StencilFaceFlags faceMask, StencilOp failOp, StencilOp passOp, StencilOp depthFailOp, CompareOp compareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkStencilFaceFlags>( faceMask ), + static_cast<VkStencilOp>( failOp ), + static_cast<VkStencilOp>( passOp ), + static_cast<VkStencilOp>( depthFailOp ), + static_cast<VkCompareOp>( compareOp ) ); + } + + //=== VK_KHR_deferred_host_operations === + + // wrapper function for command vkCreateDeferredOperationKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDeferredOperationKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDeferredOperationKHR( const AllocationCallbacks * pAllocator, + DeferredOperationKHR * pDeferredOperation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateDeferredOperationKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDeferredOperationKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDeferredOperationKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DeferredOperationKHR>::type + Device::createDeferredOperationKHR( Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDeferredOperationKHR && "Function <vkCreateDeferredOperationKHR> requires <VK_KHR_deferred_host_operations>" ); +# endif + + DeferredOperationKHR deferredOperation; + Result result = static_cast<Result>( d.vkCreateDeferredOperationKHR( + m_device, reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" ); + + return detail::createResultValueType( result, std::move( deferredOperation ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDeferredOperationKHR, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDeferredOperationKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DeferredOperationKHR, Dispatch>>::type + Device::createDeferredOperationKHRUnique( Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDeferredOperationKHR && "Function <vkCreateDeferredOperationKHR> requires <VK_KHR_deferred_host_operations>" ); +# endif + + DeferredOperationKHR deferredOperation; + Result result = static_cast<Result>( d.vkCreateDeferredOperationKHR( + m_device, reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<DeferredOperationKHR, Dispatch>( deferredOperation, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDeferredOperationKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDeferredOperationKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyDeferredOperationKHR( DeferredOperationKHR operation, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDeferredOperationKHR( + static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDeferredOperationKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDeferredOperationKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( DeferredOperationKHR operation, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDeferredOperationKHR && "Function <vkDestroyDeferredOperationKHR> requires <VK_KHR_deferred_host_operations>" ); +# endif + + d.vkDestroyDeferredOperationKHR( + m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDeferredOperationKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDeferredOperationKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( DeferredOperationKHR operation, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
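+    // Editorial note, not generator output: a sketch of the typical lifecycle behind the
+    // VK_KHR_deferred_host_operations wrappers in this section, assuming a valid vk::Device `device`,
+    // a deferrable command that accepts the operation handle, and the default allocator/dispatcher
+    // arguments (names are illustrative):
+    //
+    //   vk::DeferredOperationKHR op = device.createDeferredOperationKHR();   // create the operation
+    //   // ... record a deferrable command with `op` ...
+    //   vk::Result join = vk::Result::eThreadIdleKHR;
+    //   while ( join == vk::Result::eThreadIdleKHR )                         // a real application would yield or do other work while idle
+    //     join = device.deferredOperationJoinKHR( op );                      // loop ends on eSuccess or eThreadDoneKHR
+    //   vk::Result status = device.getDeferredOperationResultKHR( op );      // eSuccess once the deferred work completed
+    //   device.destroyDeferredOperationKHR( op );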
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDeferredOperationKHR( + static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDeferredOperationKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDeferredOperationKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( DeferredOperationKHR operation, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDeferredOperationKHR && "Function <vkDestroyDeferredOperationKHR> requires <VK_KHR_deferred_host_operations>" ); +# endif + + d.vkDestroyDeferredOperationKHR( + m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeferredOperationMaxConcurrencyKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeferredOperationMaxConcurrencyKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetDeferredOperationMaxConcurrencyKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( operation ) ); + } + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeferredOperationResultKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeferredOperationResultKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( DeferredOperationKHR operation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetDeferredOperationResultKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( operation ) ) ); + } +#else + // wrapper function for command vkGetDeferredOperationResultKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeferredOperationResultKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( DeferredOperationKHR operation, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeferredOperationResultKHR && "Function <vkGetDeferredOperationResultKHR> requires <VK_KHR_deferred_host_operations>" ); +# endif + + Result result = static_cast<Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::getDeferredOperationResultKHR", { Result::eSuccess, Result::eNotReady } ); + + return static_cast<Result>( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDeferredOperationJoinKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDeferredOperationJoinKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkDeferredOperationJoinKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( operation ) ) ); + } +#else + // wrapper function for command vkDeferredOperationJoinKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDeferredOperationJoinKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( DeferredOperationKHR operation, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDeferredOperationJoinKHR && "Function <vkDeferredOperationJoinKHR> requires <VK_KHR_deferred_host_operations>" ); +# endif + + Result result = static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR", { Result::eSuccess, Result::eThreadDoneKHR, Result::eThreadIdleKHR } ); + + return static_cast<Result>( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_pipeline_executable_properties === + + // wrapper function for command vkGetPipelineExecutablePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutablePropertiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR * pPipelineInfo, + uint32_t * pExecutableCount, + PipelineExecutablePropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkPipelineInfoKHR *>( pPipelineInfo ), + pExecutableCount, + reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineExecutablePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutablePropertiesKHR.html + template < + typename PipelineExecutablePropertiesKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PipelineExecutablePropertiesKHRAllocator::value_type, PipelineExecutablePropertiesKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type + Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineExecutablePropertiesKHR && + "Function <vkGetPipelineExecutablePropertiesKHR> requires <VK_KHR_pipeline_executable_properties>" ); +# endif + + std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties; + uint32_t executableCount; + Result result; + do + { + result = static_cast<Result>( + d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && executableCount ) + { + properties.resize( executableCount ); + result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, + reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), + &executableCount, + reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); + VULKAN_HPP_ASSERT( executableCount <= properties.size() ); + if ( executableCount < properties.size() ) + { + properties.resize( executableCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetPipelineExecutablePropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutablePropertiesKHR.html + template < + typename PipelineExecutablePropertiesKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PipelineExecutablePropertiesKHRAllocator::value_type, PipelineExecutablePropertiesKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type + Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, + PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineExecutablePropertiesKHR && + "Function <vkGetPipelineExecutablePropertiesKHR> requires <VK_KHR_pipeline_executable_properties>" ); +# endif + + std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties( pipelineExecutablePropertiesKHRAllocator ); + uint32_t executableCount; + Result result; + do + { + result = static_cast<Result>( + d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && executableCount ) + { + properties.resize( executableCount ); + result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, + reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), + &executableCount, + reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + 
detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); + VULKAN_HPP_ASSERT( executableCount <= properties.size() ); + if ( executableCount < properties.size() ) + { + properties.resize( executableCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPipelineExecutableStatisticsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutableStatisticsKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pStatisticCount, + PipelineExecutableStatisticKHR * pStatistics, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ), + pStatisticCount, + reinterpret_cast<VkPipelineExecutableStatisticKHR *>( pStatistics ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineExecutableStatisticsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutableStatisticsKHR.html + template < + typename PipelineExecutableStatisticKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PipelineExecutableStatisticKHRAllocator::value_type, PipelineExecutableStatisticKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type + Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableStatisticsKHR && + "Function <vkGetPipelineExecutableStatisticsKHR> requires <VK_KHR_pipeline_executable_properties>" ); +# endif + + std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics; + uint32_t statisticCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( + m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && statisticCount ) + { + statistics.resize( statisticCount ); + result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, + reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), + &statisticCount, + reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); + VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); + if ( statisticCount < statistics.size() ) + { + statistics.resize( statisticCount ); + } + return detail::createResultValueType( result, std::move( statistics ) ); + } + + // wrapper function for command 
vkGetPipelineExecutableStatisticsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutableStatisticsKHR.html + template < + typename PipelineExecutableStatisticKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PipelineExecutableStatisticKHRAllocator::value_type, PipelineExecutableStatisticKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type + Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, + PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableStatisticsKHR && + "Function <vkGetPipelineExecutableStatisticsKHR> requires <VK_KHR_pipeline_executable_properties>" ); +# endif + + std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics( pipelineExecutableStatisticKHRAllocator ); + uint32_t statisticCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( + m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && statisticCount ) + { + statistics.resize( statisticCount ); + result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, + reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), + &statisticCount, + reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); + VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); + if ( statisticCount < statistics.size() ) + { + statistics.resize( statisticCount ); + } + return detail::createResultValueType( result, std::move( statistics ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPipelineExecutableInternalRepresentationsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutableInternalRepresentationsKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pInternalRepresentationCount, + PipelineExecutableInternalRepresentationKHR * pInternalRepresentations, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkGetPipelineExecutableInternalRepresentationsKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ), + pInternalRepresentationCount, + reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( pInternalRepresentations ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineExecutableInternalRepresentationsKHR, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutableInternalRepresentationsKHR.html + template <typename PipelineExecutableInternalRepresentationKHRAllocator, + typename Dispatch, + typename std::enable_if< + std::is_same<typename PipelineExecutableInternalRepresentationKHRAllocator::value_type, PipelineExecutableInternalRepresentationKHR>::value, + int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type + Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableInternalRepresentationsKHR && + "Function <vkGetPipelineExecutableInternalRepresentationsKHR> requires <VK_KHR_pipeline_executable_properties>" ); +# endif + + std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator> internalRepresentations; + uint32_t internalRepresentationCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && internalRepresentationCount ) + { + internalRepresentations.resize( internalRepresentationCount ); + result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, + reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), + &internalRepresentationCount, + reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); + VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); + if ( internalRepresentationCount < internalRepresentations.size() ) + { + internalRepresentations.resize( internalRepresentationCount ); + } + return detail::createResultValueType( result, std::move( internalRepresentations ) ); + } + + // wrapper function for command vkGetPipelineExecutableInternalRepresentationsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineExecutableInternalRepresentationsKHR.html + template <typename PipelineExecutableInternalRepresentationKHRAllocator, + typename Dispatch, + typename std::enable_if< + std::is_same<typename PipelineExecutableInternalRepresentationKHRAllocator::value_type, PipelineExecutableInternalRepresentationKHR>::value, + int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type + Device::getPipelineExecutableInternalRepresentationsKHR( + const PipelineExecutableInfoKHR & executableInfo, + PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableInternalRepresentationsKHR && + "Function <vkGetPipelineExecutableInternalRepresentationsKHR> requires <VK_KHR_pipeline_executable_properties>" ); +# endif + + std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator> internalRepresentations( + pipelineExecutableInternalRepresentationKHRAllocator ); + uint32_t internalRepresentationCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && internalRepresentationCount ) + { + internalRepresentations.resize( internalRepresentationCount ); + result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, + reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), + &internalRepresentationCount, + reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); + VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); + if ( internalRepresentationCount < internalRepresentations.size() ) + { + internalRepresentations.resize( internalRepresentationCount ); + } + return detail::createResultValueType( result, std::move( internalRepresentations ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_host_image_copy === + + // wrapper function for command vkCopyMemoryToImageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToImageEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToImageEXT( const CopyMemoryToImageInfo * pCopyMemoryToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkCopyMemoryToImageEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyMemoryToImageInfo *>( pCopyMemoryToImageInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyMemoryToImageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToImageEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::copyMemoryToImageEXT( const CopyMemoryToImageInfo & copyMemoryToImageInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyMemoryToImageEXT && "Function <vkCopyMemoryToImageEXT> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" ); +# endif + + Result result = static_cast<Result>( d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast<const VkCopyMemoryToImageInfo *>( &copyMemoryToImageInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" ); + + return detail::createResultValueType( result 
); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyImageToMemoryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToMemoryEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToMemoryEXT( const CopyImageToMemoryInfo * pCopyImageToMemoryInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkCopyImageToMemoryEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyImageToMemoryInfo *>( pCopyImageToMemoryInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyImageToMemoryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToMemoryEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::copyImageToMemoryEXT( const CopyImageToMemoryInfo & copyImageToMemoryInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyImageToMemoryEXT && "Function <vkCopyImageToMemoryEXT> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" ); +# endif + + Result result = static_cast<Result>( d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast<const VkCopyImageToMemoryInfo *>( &copyImageToMemoryInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyImageToImageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToImageEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToImageEXT( const CopyImageToImageInfo * pCopyImageToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkCopyImageToImageEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyImageToImageInfo *>( pCopyImageToImageInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyImageToImageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyImageToImageEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::copyImageToImageEXT( const CopyImageToImageInfo & copyImageToImageInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyImageToImageEXT && "Function <vkCopyImageToImageEXT> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" ); +# endif + + Result result = static_cast<Result>( d.vkCopyImageToImageEXT( m_device, reinterpret_cast<const VkCopyImageToImageInfo *>( &copyImageToImageInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING
"::Device::copyImageToImageEXT" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkTransitionImageLayoutEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTransitionImageLayoutEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::transitionImageLayoutEXT( uint32_t transitionCount, + const HostImageLayoutTransitionInfo * pTransitions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkTransitionImageLayoutEXT( + static_cast<VkDevice>( m_device ), transitionCount, reinterpret_cast<const VkHostImageLayoutTransitionInfo *>( pTransitions ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkTransitionImageLayoutEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkTransitionImageLayoutEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::transitionImageLayoutEXT( ArrayProxy<const HostImageLayoutTransitionInfo> const & transitions, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkTransitionImageLayoutEXT && "Function <vkTransitionImageLayoutEXT> requires <VK_EXT_host_image_copy> or <VK_VERSION_1_4>" ); +# endif + + Result result = static_cast<Result>( + d.vkTransitionImageLayoutEXT( m_device, transitions.size(), reinterpret_cast<const VkHostImageLayoutTransitionInfo *>( transitions.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageSubresourceLayout2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2EXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( Image image, + const ImageSubresource2 * pSubresource, + SubresourceLayout2 * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSubresourceLayout2EXT( static_cast<VkDevice>( m_device ), + static_cast<VkImage>( image ), + reinterpret_cast<const VkImageSubresource2 *>( pSubresource ), + reinterpret_cast<VkSubresourceLayout2 *>( pLayout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageSubresourceLayout2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2EXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE SubresourceLayout2 Device::getImageSubresourceLayout2EXT( Image image, + const ImageSubresource2 & subresource, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + 
VULKAN_HPP_ASSERT( + d.vkGetImageSubresourceLayout2EXT && + "Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); +# endif + + SubresourceLayout2 layout; + d.vkGetImageSubresourceLayout2EXT( m_device, + static_cast<VkImage>( image ), + reinterpret_cast<const VkImageSubresource2 *>( &subresource ), + reinterpret_cast<VkSubresourceLayout2 *>( &layout ) ); + + return layout; + } + + // wrapper function for command vkGetImageSubresourceLayout2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2EXT.html + template <typename X, typename Y, typename... Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> + Device::getImageSubresourceLayout2EXT( Image image, const ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkGetImageSubresourceLayout2EXT && + "Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + SubresourceLayout2 & layout = structureChain.template get<SubresourceLayout2>(); + d.vkGetImageSubresourceLayout2EXT( m_device, + static_cast<VkImage>( image ), + reinterpret_cast<const VkImageSubresource2 *>( &subresource ), + reinterpret_cast<VkSubresourceLayout2 *>( &layout ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_map_memory2 === + + // wrapper function for command vkMapMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory2KHR( const MemoryMapInfo * pMemoryMapInfo, + void ** ppData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkMapMemory2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryMapInfo *>( pMemoryMapInfo ), ppData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkMapMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkMapMemory2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type Device::mapMemory2KHR( const MemoryMapInfo & memoryMapInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkMapMemory2KHR && "Function <vkMapMemory2KHR> requires <VK_KHR_map_memory2> or <VK_VERSION_1_4>" ); +# endif + + void * pData; + Result result = static_cast<Result>( d.vkMapMemory2KHR( m_device, reinterpret_cast<const VkMemoryMapInfo *>( &memoryMapInfo ), &pData ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2KHR" ); + + return detail::createResultValueType( result, std::move( pData ) ); + } +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUnmapMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnmapMemory2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::unmapMemory2KHR( const MemoryUnmapInfo * pMemoryUnmapInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkUnmapMemory2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryUnmapInfo *>( pMemoryUnmapInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUnmapMemory2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkUnmapMemory2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::unmapMemory2KHR( const MemoryUnmapInfo & memoryUnmapInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUnmapMemory2KHR && "Function <vkUnmapMemory2KHR> requires <VK_KHR_map_memory2> or <VK_VERSION_1_4>" ); +# endif + + Result result = static_cast<Result>( d.vkUnmapMemory2KHR( m_device, reinterpret_cast<const VkMemoryUnmapInfo *>( &memoryUnmapInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2KHR" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_swapchain_maintenance1 === + + // wrapper function for command vkReleaseSwapchainImagesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseSwapchainImagesEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseSwapchainImagesEXT( const ReleaseSwapchainImagesInfoKHR * pReleaseInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkReleaseSwapchainImagesEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkReleaseSwapchainImagesInfoKHR *>( pReleaseInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkReleaseSwapchainImagesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseSwapchainImagesEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::releaseSwapchainImagesEXT( const ReleaseSwapchainImagesInfoKHR & releaseInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkReleaseSwapchainImagesEXT && + "Function <vkReleaseSwapchainImagesEXT> requires <VK_EXT_swapchain_maintenance1> or <VK_KHR_swapchain_maintenance1>" ); +# endif + + Result result = static_cast<Result>( d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast<const VkReleaseSwapchainImagesInfoKHR *>( &releaseInfo ) ) ); + detail::resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesEXT" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_device_generated_commands === + + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV * pInfo, + MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetGeneratedCommandsMemoryRequirementsNV( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ), + reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE MemoryRequirements2 + Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsNV && + "Function <vkGetGeneratedCommandsMemoryRequirementsNV> requires <VK_NV_device_generated_commands>" ); +# endif + + MemoryRequirements2 memoryRequirements; + d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, + reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ), + reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsNV.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> + Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsNV && + "Function <vkGetGeneratedCommandsMemoryRequirementsNV> requires <VK_NV_device_generated_commands>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + MemoryRequirements2 & memoryRequirements = structureChain.template get<MemoryRequirements2>(); + d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, + reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ), + reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPreprocessGeneratedCommandsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPreprocessGeneratedCommandsNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPreprocessGeneratedCommandsNV( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPreprocessGeneratedCommandsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPreprocessGeneratedCommandsNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPreprocessGeneratedCommandsNV && "Function <vkCmdPreprocessGeneratedCommandsNV> requires <VK_NV_device_generated_commands>" ); +# endif + + d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdExecuteGeneratedCommandsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteGeneratedCommandsNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( Bool32 isPreprocessed, + const GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdExecuteGeneratedCommandsNV( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkBool32>( isPreprocessed ), + reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) ); + } + +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdExecuteGeneratedCommandsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteGeneratedCommandsNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( Bool32 isPreprocessed, + const GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdExecuteGeneratedCommandsNV && "Function <vkCmdExecuteGeneratedCommandsNV> requires <VK_NV_device_generated_commands>" ); +# endif + + d.vkCmdExecuteGeneratedCommandsNV( + m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindPipelineShaderGroupNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindPipelineShaderGroupNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( PipelineBindPoint pipelineBindPoint, + Pipeline pipeline, + uint32_t groupIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindPipelineShaderGroupNV( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ), groupIndex ); + } + + // wrapper function for command vkCreateIndirectCommandsLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectCommandsLayoutNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV * pCreateInfo, + const AllocationCallbacks * pAllocator, + IndirectCommandsLayoutNV * pIndirectCommandsLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateIndirectCommandsLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectCommandsLayoutNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<IndirectCommandsLayoutNV>::type Device::createIndirectCommandsLayoutNV( + const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutNV && "Function 
<vkCreateIndirectCommandsLayoutNV> requires <VK_NV_device_generated_commands>" ); +# endif + + IndirectCommandsLayoutNV indirectCommandsLayout; + Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device, + reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" ); + + return detail::createResultValueType( result, std::move( indirectCommandsLayout ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateIndirectCommandsLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectCommandsLayoutNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<IndirectCommandsLayoutNV, Dispatch>>::type + Device::createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutNV && "Function <vkCreateIndirectCommandsLayoutNV> requires <VK_NV_device_generated_commands>" ); +# endif + + IndirectCommandsLayoutNV indirectCommandsLayout; + Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device, + reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<IndirectCommandsLayoutNV, Dispatch>( indirectCommandsLayout, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyIndirectCommandsLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( IndirectCommandsLayoutNV indirectCommandsLayout, + const AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyIndirectCommandsLayoutNV( static_cast<VkDevice>( m_device ), + static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyIndirectCommandsLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( IndirectCommandsLayoutNV 
indirectCommandsLayout, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutNV && "Function <vkDestroyIndirectCommandsLayoutNV> requires <VK_NV_device_generated_commands>" ); +# endif + + d.vkDestroyIndirectCommandsLayoutNV( + m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyIndirectCommandsLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( IndirectCommandsLayoutNV indirectCommandsLayout, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyIndirectCommandsLayoutNV( static_cast<VkDevice>( m_device ), + static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyIndirectCommandsLayoutNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( IndirectCommandsLayoutNV indirectCommandsLayout, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutNV && "Function <vkDestroyIndirectCommandsLayoutNV> requires <VK_NV_device_generated_commands>" ); +# endif + + d.vkDestroyIndirectCommandsLayoutNV( + m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_depth_bias_control === + + // wrapper function for command vkCmdSetDepthBias2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBias2EXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const DepthBiasInfoEXT * pDepthBiasInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBias2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDepthBiasInfoEXT *>( pDepthBiasInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetDepthBias2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBias2EXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const DepthBiasInfoEXT & depthBiasInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 
+ { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetDepthBias2EXT && "Function <vkCmdSetDepthBias2EXT> requires <VK_EXT_depth_bias_control>" ); +# endif + + d.vkCmdSetDepthBias2EXT( m_commandBuffer, reinterpret_cast<const VkDepthBiasInfoEXT *>( &depthBiasInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_acquire_drm_display === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquireDrmDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireDrmDisplayEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, + DisplayKHR display, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkAcquireDrmDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), drmFd, static_cast<VkDisplayKHR>( display ) ) ); + } +#else + // wrapper function for command vkAcquireDrmDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireDrmDisplayEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquireDrmDisplayEXT && "Function <vkAcquireDrmDisplayEXT> requires <VK_EXT_acquire_drm_display>" ); +# endif + + Result result = static_cast<Result>( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" ); + + return detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkGetDrmDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDrmDisplayEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, DisplayKHR * display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkGetDrmDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( display ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDrmDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDrmDisplayEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DisplayKHR>::type + PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( 
d.vkGetDrmDisplayEXT && "Function <vkGetDrmDisplayEXT> requires <VK_EXT_acquire_drm_display>" ); +# endif + + DisplayKHR display; + Result result = static_cast<Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXT" ); + + return detail::createResultValueType( result, std::move( display ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkGetDrmDisplayEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDrmDisplayEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DisplayKHR, Dispatch>>::type + PhysicalDevice::getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDrmDisplayEXT && "Function <vkGetDrmDisplayEXT> requires <VK_EXT_acquire_drm_display>" ); +# endif + + DisplayKHR display; + Result result = static_cast<Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique" ); + + return detail::createResultValueType( result, UniqueHandle<DisplayKHR, Dispatch>( display, detail::ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_private_data === + + // wrapper function for command vkCreatePrivateDataSlotEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlotEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlotEXT( const PrivateDataSlotCreateInfo * pCreateInfo, + const AllocationCallbacks * pAllocator, + PrivateDataSlot * pPrivateDataSlot, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreatePrivateDataSlotEXT( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreatePrivateDataSlotEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlotEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<PrivateDataSlot>::type + Device::createPrivateDataSlotEXT( const PrivateDataSlotCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlotEXT && "Function <vkCreatePrivateDataSlotEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); +# endif + + PrivateDataSlot privateDataSlot; + Result 
result = static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device, + reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" ); + + return detail::createResultValueType( result, std::move( privateDataSlot ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreatePrivateDataSlotEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlotEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<PrivateDataSlot, Dispatch>>::type Device::createPrivateDataSlotEXTUnique( + const PrivateDataSlotCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlotEXT && "Function <vkCreatePrivateDataSlotEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); +# endif + + PrivateDataSlot privateDataSlot; + Result result = static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device, + reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<PrivateDataSlot, Dispatch>( privateDataSlot, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPrivateDataSlotEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPrivateDataSlotEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyPrivateDataSlotEXT( PrivateDataSlot privateDataSlot, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPrivateDataSlotEXT( + static_cast<VkDevice>( m_device ), static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPrivateDataSlotEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPrivateDataSlotEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( PrivateDataSlot privateDataSlot, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPrivateDataSlotEXT && "Function <vkDestroyPrivateDataSlotEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); +# endif + + d.vkDestroyPrivateDataSlotEXT( 
+ m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetPrivateDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetPrivateDataEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateDataEXT( + ObjectType objectType_, uint64_t objectHandle, PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkSetPrivateDataEXT( + static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); + } +#else + // wrapper function for command vkSetPrivateDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetPrivateDataEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::setPrivateDataEXT( ObjectType objectType_, uint64_t objectHandle, PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetPrivateDataEXT && "Function <vkSetPrivateDataEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); +# endif + + Result result = static_cast<Result>( + d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" ); + + return detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkGetPrivateDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPrivateDataEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getPrivateDataEXT( + ObjectType objectType_, uint64_t objectHandle, PrivateDataSlot privateDataSlot, uint64_t * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPrivateDataEXT( + static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPrivateDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPrivateDataEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t + Device::getPrivateDataEXT( ObjectType objectType_, uint64_t objectHandle, PrivateDataSlot privateDataSlot, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPrivateDataEXT && "Function <vkGetPrivateDataEXT> 
requires <VK_EXT_private_data> or <VK_VERSION_1_3>" ); +# endif + + uint64_t data; + d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data ); + + return data; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_video_encode_queue === + + // wrapper function for command vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const PhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo, + VideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( pQualityLevelInfo ), + reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( pQualityLevelProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VideoEncodeQualityLevelPropertiesKHR>::type + PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR && + "Function <vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR> requires <VK_KHR_video_encode_queue>" ); +# endif + + VideoEncodeQualityLevelPropertiesKHR qualityLevelProperties; + Result result = static_cast<Result>( + d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( &qualityLevelInfo ), + reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( &qualityLevelProperties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); + + return detail::createResultValueType( result, std::move( qualityLevelProperties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type + PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR && + "Function <vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR> requires <VK_KHR_video_encode_queue>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + VideoEncodeQualityLevelPropertiesKHR & qualityLevelProperties = structureChain.template get<VideoEncodeQualityLevelPropertiesKHR>(); + Result result = static_cast<Result>( + d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( &qualityLevelInfo ), + reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( &qualityLevelProperties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); + + return detail::createResultValueType( result, std::move( structureChain ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetEncodedVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEncodedVideoSessionParametersKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getEncodedVideoSessionParametersKHR( const VideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo, + VideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo, + size_t * pDataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkGetEncodedVideoSessionParametersKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( pVideoSessionParametersInfo ), + reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( pFeedbackInfo ), + pDataSize, + pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetEncodedVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEncodedVideoSessionParametersKHR.html + template <typename Uint8_tAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::pair<VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>>>::type + Device::getEncodedVideoSessionParametersKHR( const VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" ); +# endif + + 
std::pair<VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>> data_; + VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first; + std::vector<uint8_t, Uint8_tAllocator> & data = data_.second; + size_t dataSize; + Result result; + do + { + result = static_cast<Result>( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), + reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), + &dataSize, + nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast<Result>( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), + reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), + &dataSize, + reinterpret_cast<void *>( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); + + return detail::createResultValueType( result, std::move( data_ ) ); + } + + // wrapper function for command vkGetEncodedVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEncodedVideoSessionParametersKHR.html + template <typename Uint8_tAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::pair<VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>>>::type + Device::getEncodedVideoSessionParametersKHR( const VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" ); +# endif + + std::pair<VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>> data_( + std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) ); + VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first; + std::vector<uint8_t, Uint8_tAllocator> & data = data_.second; + size_t dataSize; + Result result; + do + { + result = static_cast<Result>( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), + reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), + &dataSize, + nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast<Result>( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), + reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), + &dataSize, + reinterpret_cast<void *>( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::getEncodedVideoSessionParametersKHR" ); + + return detail::createResultValueType( result, std::move( data_ ) ); + } + + // wrapper function for command vkGetEncodedVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEncodedVideoSessionParametersKHR.html + template <typename X, + typename Y, + typename... Z, + typename Uint8_tAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>>>::type + Device::getEncodedVideoSessionParametersKHR( const VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" ); +# endif + + std::pair<StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>> data_; + VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first.template get<VideoEncodeSessionParametersFeedbackInfoKHR>(); + std::vector<uint8_t, Uint8_tAllocator> & data = data_.second; + size_t dataSize; + Result result; + do + { + result = static_cast<Result>( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), + reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), + &dataSize, + nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast<Result>( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), + reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), + &dataSize, + reinterpret_cast<void *>( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); + + return detail::createResultValueType( result, std::move( data_ ) ); + } + + // wrapper function for command vkGetEncodedVideoSessionParametersKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetEncodedVideoSessionParametersKHR.html + template <typename X, + typename Y, + typename... 
Z, + typename Uint8_tAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>>>::type + Device::getEncodedVideoSessionParametersKHR( const VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" ); +# endif + + std::pair<StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>> data_( + std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) ); + VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first.template get<VideoEncodeSessionParametersFeedbackInfoKHR>(); + std::vector<uint8_t, Uint8_tAllocator> & data = data_.second; + size_t dataSize; + Result result; + do + { + result = static_cast<Result>( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), + reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), + &dataSize, + nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast<Result>( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ), + reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ), + &dataSize, + reinterpret_cast<void *>( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); + + return detail::createResultValueType( result, std::move( data_ ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEncodeVideoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEncodeVideoKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VideoEncodeInfoKHR * pEncodeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEncodeVideoKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoEncodeInfoKHR *>( pEncodeInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdEncodeVideoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEncodeVideoKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VideoEncodeInfoKHR & encodeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdEncodeVideoKHR && "Function <vkCmdEncodeVideoKHR> 
requires <VK_KHR_video_encode_queue>" ); +# endif + + d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( &encodeInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + + // wrapper function for command vkCreateCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCudaModuleNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCudaModuleNV( const CudaModuleCreateInfoNV * pCreateInfo, + const AllocationCallbacks * pAllocator, + CudaModuleNV * pModule, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateCudaModuleNV( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkCudaModuleCreateInfoNV *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkCudaModuleNV *>( pModule ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCudaModuleNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<CudaModuleNV>::type + Device::createCudaModuleNV( const CudaModuleCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCudaModuleNV && "Function <vkCreateCudaModuleNV> requires <VK_NV_cuda_kernel_launch>" ); +# endif + + CudaModuleNV module; + Result result = static_cast<Result>( d.vkCreateCudaModuleNV( m_device, + reinterpret_cast<const VkCudaModuleCreateInfoNV *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkCudaModuleNV *>( &module ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNV" ); + + return detail::createResultValueType( result, std::move( module ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCudaModuleNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<CudaModuleNV, Dispatch>>::type + Device::createCudaModuleNVUnique( const CudaModuleCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCudaModuleNV && "Function <vkCreateCudaModuleNV> requires <VK_NV_cuda_kernel_launch>" ); +# endif + + CudaModuleNV module; + Result result = static_cast<Result>( d.vkCreateCudaModuleNV( m_device, + reinterpret_cast<const VkCudaModuleCreateInfoNV *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkCudaModuleNV *>( &module ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::createCudaModuleNVUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<CudaModuleNV, Dispatch>( module, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetCudaModuleCacheNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCudaModuleCacheNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getCudaModuleCacheNV( CudaModuleNV module, size_t * pCacheSize, void * pCacheData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetCudaModuleCacheNV( static_cast<VkDevice>( m_device ), static_cast<VkCudaModuleNV>( module ), pCacheSize, pCacheData ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetCudaModuleCacheNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCudaModuleCacheNV.html + template <typename Uint8_tAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type + Device::getCudaModuleCacheNV( CudaModuleNV module, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCudaModuleCacheNV && "Function <vkGetCudaModuleCacheNV> requires <VK_NV_cuda_kernel_launch>" ); +# endif + + std::vector<uint8_t, Uint8_tAllocator> cacheData; + size_t cacheSize; + Result result; + do + { + result = static_cast<Result>( d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && cacheSize ) + { + cacheData.resize( cacheSize ); + result = static_cast<Result>( + d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, reinterpret_cast<void *>( cacheData.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" ); + VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() ); + if ( cacheSize < cacheData.size() ) + { + cacheData.resize( cacheSize ); + } + return detail::createResultValueType( result, std::move( cacheData ) ); + } + + // wrapper function for command vkGetCudaModuleCacheNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCudaModuleCacheNV.html + template <typename Uint8_tAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type + Device::getCudaModuleCacheNV( CudaModuleNV module, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCudaModuleCacheNV && "Function <vkGetCudaModuleCacheNV> 
requires <VK_NV_cuda_kernel_launch>" ); +# endif + + std::vector<uint8_t, Uint8_tAllocator> cacheData( uint8_tAllocator ); + size_t cacheSize; + Result result; + do + { + result = static_cast<Result>( d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && cacheSize ) + { + cacheData.resize( cacheSize ); + result = static_cast<Result>( + d.vkGetCudaModuleCacheNV( m_device, static_cast<VkCudaModuleNV>( module ), &cacheSize, reinterpret_cast<void *>( cacheData.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" ); + VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() ); + if ( cacheSize < cacheData.size() ) + { + cacheData.resize( cacheSize ); + } + return detail::createResultValueType( result, std::move( cacheData ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCudaFunctionNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCudaFunctionNV( const CudaFunctionCreateInfoNV * pCreateInfo, + const AllocationCallbacks * pAllocator, + CudaFunctionNV * pFunction, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateCudaFunctionNV( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkCudaFunctionNV *>( pFunction ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCudaFunctionNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<CudaFunctionNV>::type + Device::createCudaFunctionNV( const CudaFunctionCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCudaFunctionNV && "Function <vkCreateCudaFunctionNV> requires <VK_NV_cuda_kernel_launch>" ); +# endif + + CudaFunctionNV function; + Result result = static_cast<Result>( d.vkCreateCudaFunctionNV( m_device, + reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkCudaFunctionNV *>( &function ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNV" ); + + return detail::createResultValueType( result, std::move( function ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateCudaFunctionNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<CudaFunctionNV, Dispatch>>::type + Device::createCudaFunctionNVUnique( 
const CudaFunctionCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCudaFunctionNV && "Function <vkCreateCudaFunctionNV> requires <VK_NV_cuda_kernel_launch>" ); +# endif + + CudaFunctionNV function; + Result result = static_cast<Result>( d.vkCreateCudaFunctionNV( m_device, + reinterpret_cast<const VkCudaFunctionCreateInfoNV *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkCudaFunctionNV *>( &function ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNVUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<CudaFunctionNV, Dispatch>( function, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaModuleNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyCudaModuleNV( CudaModuleNV module, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCudaModuleNV( + static_cast<VkDevice>( m_device ), static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaModuleNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyCudaModuleNV( CudaModuleNV module, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCudaModuleNV && "Function <vkDestroyCudaModuleNV> requires <VK_NV_cuda_kernel_launch>" ); +# endif + + d.vkDestroyCudaModuleNV( m_device, static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaModuleNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( CudaModuleNV module, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCudaModuleNV( + static_cast<VkDevice>( m_device ), static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCudaModuleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaModuleNV.html + template <typename Dispatch, typename 
std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( CudaModuleNV module, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCudaModuleNV && "Function <vkDestroyCudaModuleNV> requires <VK_NV_cuda_kernel_launch>" ); +# endif + + d.vkDestroyCudaModuleNV( m_device, static_cast<VkCudaModuleNV>( module ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaFunctionNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyCudaFunctionNV( CudaFunctionNV function, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCudaFunctionNV( + static_cast<VkDevice>( m_device ), static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaFunctionNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyCudaFunctionNV( CudaFunctionNV function, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCudaFunctionNV && "Function <vkDestroyCudaFunctionNV> requires <VK_NV_cuda_kernel_launch>" ); +# endif + + d.vkDestroyCudaFunctionNV( m_device, static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaFunctionNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( CudaFunctionNV function, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCudaFunctionNV( + static_cast<VkDevice>( m_device ), static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyCudaFunctionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCudaFunctionNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( CudaFunctionNV function, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCudaFunctionNV && "Function <vkDestroyCudaFunctionNV> requires <VK_NV_cuda_kernel_launch>" ); +# endif + + d.vkDestroyCudaFunctionNV( m_device, static_cast<VkCudaFunctionNV>( function ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCudaLaunchKernelNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCudaLaunchKernelNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const CudaLaunchInfoNV * pLaunchInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCudaLaunchKernelNV( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCudaLaunchInfoNV *>( pLaunchInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCudaLaunchKernelNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCudaLaunchKernelNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const CudaLaunchInfoNV & launchInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCudaLaunchKernelNV && "Function <vkCmdCudaLaunchKernelNV> requires <VK_NV_cuda_kernel_launch>" ); +# endif + + d.vkCmdCudaLaunchKernelNV( m_commandBuffer, reinterpret_cast<const VkCudaLaunchInfoNV *>( &launchInfo ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_QCOM_tile_shading === + + // wrapper function for command vkCmdDispatchTileQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchTileQCOM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::dispatchTileQCOM( const DispatchTileInfoQCOM * pDispatchTileInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchTileQCOM( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDispatchTileInfoQCOM *>( pDispatchTileInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDispatchTileQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchTileQCOM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::dispatchTileQCOM( const DispatchTileInfoQCOM & dispatchTileInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDispatchTileQCOM && "Function <vkCmdDispatchTileQCOM> requires <VK_QCOM_tile_shading>" ); +# endif + + d.vkCmdDispatchTileQCOM( m_commandBuffer, reinterpret_cast<const VkDispatchTileInfoQCOM *>( &dispatchTileInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBeginPerTileExecutionQCOM, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginPerTileExecutionQCOM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::beginPerTileExecutionQCOM( const PerTileBeginInfoQCOM * pPerTileBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginPerTileExecutionQCOM( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkPerTileBeginInfoQCOM *>( pPerTileBeginInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginPerTileExecutionQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginPerTileExecutionQCOM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::beginPerTileExecutionQCOM( const PerTileBeginInfoQCOM & perTileBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginPerTileExecutionQCOM && "Function <vkCmdBeginPerTileExecutionQCOM> requires <VK_QCOM_tile_shading>" ); +# endif + + d.vkCmdBeginPerTileExecutionQCOM( m_commandBuffer, reinterpret_cast<const VkPerTileBeginInfoQCOM *>( &perTileBeginInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdEndPerTileExecutionQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndPerTileExecutionQCOM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::endPerTileExecutionQCOM( const PerTileEndInfoQCOM * pPerTileEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndPerTileExecutionQCOM( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkPerTileEndInfoQCOM *>( pPerTileEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdEndPerTileExecutionQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndPerTileExecutionQCOM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::endPerTileExecutionQCOM( const PerTileEndInfoQCOM & perTileEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdEndPerTileExecutionQCOM && "Function <vkCmdEndPerTileExecutionQCOM> requires <VK_QCOM_tile_shading>" ); +# endif + + d.vkCmdEndPerTileExecutionQCOM( m_commandBuffer, reinterpret_cast<const VkPerTileEndInfoQCOM *>( &perTileEndInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + + // wrapper function for command vkExportMetalObjectsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkExportMetalObjectsEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::exportMetalObjectsEXT( ExportMetalObjectsInfoEXT * pMetalObjectsInfo, Dispatch const 
& d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkExportMetalObjectsEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<VkExportMetalObjectsInfoEXT *>( pMetalObjectsInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkExportMetalObjectsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkExportMetalObjectsEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::exportMetalObjectsEXT( ExportMetalObjectsInfoEXT & metalObjectsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkExportMetalObjectsEXT && "Function <vkExportMetalObjectsEXT> requires <VK_EXT_metal_objects>" ); +# endif + + d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + + // wrapper function for command vkCmdSetEvent2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( Event event, const DependencyInfo * pDependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetEvent2KHR( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetEvent2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( Event event, const DependencyInfo & dependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetEvent2KHR && "Function <vkCmdSetEvent2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); +# endif + + d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdResetEvent2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResetEvent2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( Event event, PipelineStageFlags2 stageMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResetEvent2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) ); + } + + // wrapper function for command vkCmdWaitEvents2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents2KHR.html + template <typename Dispatch, 
typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( uint32_t eventCount, + const Event * pEvents, + const DependencyInfo * pDependencyInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWaitEvents2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + eventCount, + reinterpret_cast<const VkEvent *>( pEvents ), + reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdWaitEvents2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( ArrayProxy<const Event> const & events, + ArrayProxy<const DependencyInfo> const & dependencyInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdWaitEvents2KHR && "Function <vkCmdWaitEvents2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); +# else + if ( events.size() != dependencyInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdWaitEvents2KHR( m_commandBuffer, + events.size(), + reinterpret_cast<const VkEvent *>( events.data() ), + reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPipelineBarrier2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const DependencyInfo * pDependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPipelineBarrier2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPipelineBarrier2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const DependencyInfo & dependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPipelineBarrier2KHR && "Function <vkCmdPipelineBarrier2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); +# endif + + d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdWriteTimestamp2KHR, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteTimestamp2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::writeTimestamp2KHR( PipelineStageFlags2 stage, QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteTimestamp2KHR( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query ); + } + + // wrapper function for command vkQueueSubmit2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Queue::submit2KHR( uint32_t submitCount, const SubmitInfo2 * pSubmits, Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkQueueSubmit2KHR( static_cast<VkQueue>( m_queue ), submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueSubmit2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Queue::submit2KHR( ArrayProxy<const SubmitInfo2> const & submits, Fence fence, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueSubmit2KHR && "Function <vkQueueSubmit2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" ); +# endif + + Result result = static_cast<Result>( + d.vkQueueSubmit2KHR( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_descriptor_buffer === + + // wrapper function for command vkGetDescriptorSetLayoutSizeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSizeEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::getDescriptorSetLayoutSizeEXT( DescriptorSetLayout layout, DeviceSize * pLayoutSizeInBytes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDescriptorSetLayoutSizeEXT( + static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( pLayoutSizeInBytes ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDescriptorSetLayoutSizeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSizeEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE DeviceSize Device::getDescriptorSetLayoutSizeEXT( DescriptorSetLayout layout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSizeEXT && "Function <vkGetDescriptorSetLayoutSizeEXT> requires <VK_EXT_descriptor_buffer>" ); +# endif + + DeviceSize layoutSizeInBytes; + d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( &layoutSizeInBytes ) ); + + return layoutSizeInBytes; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDescriptorSetLayoutBindingOffsetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutBindingOffsetEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutBindingOffsetEXT( DescriptorSetLayout layout, + uint32_t binding, + DeviceSize * pOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDescriptorSetLayoutBindingOffsetEXT( + static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( pOffset ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDescriptorSetLayoutBindingOffsetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutBindingOffsetEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DeviceSize Device::getDescriptorSetLayoutBindingOffsetEXT( DescriptorSetLayout layout, + uint32_t binding, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutBindingOffsetEXT && + "Function <vkGetDescriptorSetLayoutBindingOffsetEXT> requires <VK_EXT_descriptor_buffer>" ); +# endif + + DeviceSize offset; + d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( &offset ) ); + + return offset; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDescriptorEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::getDescriptorEXT( const DescriptorGetInfoEXT * pDescriptorInfo, size_t dataSize, void * pDescriptor, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDescriptorEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDescriptorGetInfoEXT *>( pDescriptorInfo ), dataSize, pDescriptor ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDescriptorEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::getDescriptorEXT( const 
DescriptorGetInfoEXT & descriptorInfo, size_t dataSize, void * pDescriptor, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorEXT && "Function <vkGetDescriptorEXT> requires <VK_EXT_descriptor_buffer>" ); +# endif + + d.vkGetDescriptorEXT( m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( &descriptorInfo ), dataSize, pDescriptor ); + } + + // wrapper function for command vkGetDescriptorEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorEXT.html + template <typename DescriptorType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DescriptorType Device::getDescriptorEXT( const DescriptorGetInfoEXT & descriptorInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorEXT && "Function <vkGetDescriptorEXT> requires <VK_EXT_descriptor_buffer>" ); +# endif + + DescriptorType descriptor; + d.vkGetDescriptorEXT( + m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( &descriptorInfo ), sizeof( DescriptorType ), reinterpret_cast<void *>( &descriptor ) ); + + return descriptor; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindDescriptorBuffersEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorBuffersEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBuffersEXT( uint32_t bufferCount, + const DescriptorBufferBindingInfoEXT * pBindingInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindDescriptorBuffersEXT( + static_cast<VkCommandBuffer>( m_commandBuffer ), bufferCount, reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( pBindingInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindDescriptorBuffersEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorBuffersEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBuffersEXT( ArrayProxy<const DescriptorBufferBindingInfoEXT> const & bindingInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorBuffersEXT && "Function <vkCmdBindDescriptorBuffersEXT> requires <VK_EXT_descriptor_buffer>" ); +# endif + + d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bindingInfos.size(), reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( bindingInfos.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetDescriptorBufferOffsetsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDescriptorBufferOffsetsEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void 
CommandBuffer::setDescriptorBufferOffsetsEXT( PipelineBindPoint pipelineBindPoint, + PipelineLayout layout, + uint32_t firstSet, + uint32_t setCount, + const uint32_t * pBufferIndices, + const DeviceSize * pOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDescriptorBufferOffsetsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkPipelineBindPoint>( pipelineBindPoint ), + static_cast<VkPipelineLayout>( layout ), + firstSet, + setCount, + pBufferIndices, + reinterpret_cast<const VkDeviceSize *>( pOffsets ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetDescriptorBufferOffsetsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDescriptorBufferOffsetsEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsetsEXT( PipelineBindPoint pipelineBindPoint, + PipelineLayout layout, + uint32_t firstSet, + ArrayProxy<const uint32_t> const & bufferIndices, + ArrayProxy<const DeviceSize> const & offsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetDescriptorBufferOffsetsEXT && "Function <vkCmdSetDescriptorBufferOffsetsEXT> requires <VK_EXT_descriptor_buffer>" ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( bufferIndices.size() == offsets.size() ); +# else + if ( bufferIndices.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setDescriptorBufferOffsetsEXT: bufferIndices.size() != offsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer, + static_cast<VkPipelineBindPoint>( pipelineBindPoint ), + static_cast<VkPipelineLayout>( layout ), + firstSet, + bufferIndices.size(), + bufferIndices.data(), + reinterpret_cast<const VkDeviceSize *>( offsets.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindDescriptorBufferEmbeddedSamplersEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorBufferEmbeddedSamplersEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplersEXT( PipelineBindPoint pipelineBindPoint, + PipelineLayout layout, + uint32_t set, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindDescriptorBufferEmbeddedSamplersEXT( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set ); + } + + // wrapper function for command vkGetBufferOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureDescriptorDataEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getBufferOpaqueCaptureDescriptorDataEXT( const BufferCaptureDescriptorDataInfoEXT * pInfo, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetBufferOpaqueCaptureDescriptorDataEXT( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureDescriptorDataEXT.html + template <typename DataType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type + Device::getBufferOpaqueCaptureDescriptorDataEXT( const BufferCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferOpaqueCaptureDescriptorDataEXT && + "Function <vkGetBufferOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" ); +# endif + + DataType data; + Result result = static_cast<Result>( + d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferOpaqueCaptureDescriptorDataEXT" ); + + return detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageOpaqueCaptureDescriptorDataEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageOpaqueCaptureDescriptorDataEXT( const ImageCaptureDescriptorDataInfoEXT * pInfo, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetImageOpaqueCaptureDescriptorDataEXT( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageOpaqueCaptureDescriptorDataEXT.html + template <typename DataType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type + Device::getImageOpaqueCaptureDescriptorDataEXT( const ImageCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageOpaqueCaptureDescriptorDataEXT && + "Function <vkGetImageOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" ); +# endif + + DataType data; + Result result = static_cast<Result>( + d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDescriptorDataEXT" ); + + return 
detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageViewOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewOpaqueCaptureDescriptorDataEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewOpaqueCaptureDescriptorDataEXT( const ImageViewCaptureDescriptorDataInfoEXT * pInfo, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageViewOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageViewOpaqueCaptureDescriptorDataEXT.html + template <typename DataType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type + Device::getImageViewOpaqueCaptureDescriptorDataEXT( const ImageViewCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageViewOpaqueCaptureDescriptorDataEXT && + "Function <vkGetImageViewOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" ); +# endif + + DataType data; + Result result = static_cast<Result>( + d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewOpaqueCaptureDescriptorDataEXT" ); + + return detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetSamplerOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSamplerOpaqueCaptureDescriptorDataEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSamplerOpaqueCaptureDescriptorDataEXT( const SamplerCaptureDescriptorDataInfoEXT * pInfo, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSamplerOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSamplerOpaqueCaptureDescriptorDataEXT.html + template <typename DataType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type + Device::getSamplerOpaqueCaptureDescriptorDataEXT( const 
SamplerCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSamplerOpaqueCaptureDescriptorDataEXT && + "Function <vkGetSamplerOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" ); +# endif + + DataType data; + Result result = static_cast<Result>( + d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSamplerOpaqueCaptureDescriptorDataEXT" ); + + return detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( + const AccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT.html + template <typename DataType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type + Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const AccelerationStructureCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT && + "Function <vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" ); +# endif + + DataType data; + Result result = static_cast<Result>( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + m_device, reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( &info ), &data ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT" ); + + return detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_fragment_shading_rate_enums === + + // wrapper function for command vkCmdSetFragmentShadingRateEnumNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFragmentShadingRateEnumNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV( FragmentShadingRateNV 
shadingRate, + const FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetFragmentShadingRateEnumNV( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkFragmentShadingRateNV>( shadingRate ), + reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) ); + } + + //=== VK_EXT_mesh_shader === + + // wrapper function for command vkCmdDrawMeshTasksEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawMeshTasksEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); + } + + // wrapper function for command vkCmdDrawMeshTasksIndirectEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksIndirectEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectEXT( + Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawMeshTasksIndirectEXT( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); + } + + // wrapper function for command vkCmdDrawMeshTasksIndirectCountEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMeshTasksIndirectCountEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountEXT( Buffer buffer, + DeviceSize offset, + Buffer countBuffer, + DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawMeshTasksIndirectCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkBuffer>( buffer ), + static_cast<VkDeviceSize>( offset ), + static_cast<VkBuffer>( countBuffer ), + static_cast<VkDeviceSize>( countBufferOffset ), + maxDrawCount, + stride ); + } + + //=== VK_KHR_copy_commands2 === + + // wrapper function for command vkCmdCopyBuffer2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const CopyBufferInfo2 * pCopyBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyBuffer2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer2KHR.html + template <typename 
Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const CopyBufferInfo2 & copyBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyBuffer2KHR && "Function <vkCmdCopyBuffer2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); +# endif + + d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const CopyImageInfo2 * pCopyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const CopyImageInfo2 & copyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyImage2KHR && "Function <vkCmdCopyImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); +# endif + + d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyBufferToImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const CopyBufferToImageInfo2 * pCopyBufferToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyBufferToImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyBufferToImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const CopyBufferToImageInfo2 & copyBufferToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyBufferToImage2KHR && "Function <vkCmdCopyBufferToImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); +# endif + + 
d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyImageToBuffer2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const CopyImageToBufferInfo2 * pCopyImageToBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImageToBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyImageToBuffer2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const CopyImageToBufferInfo2 & copyImageToBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyImageToBuffer2KHR && "Function <vkCmdCopyImageToBuffer2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); +# endif + + d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBlitImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const BlitImageInfo2 * pBlitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBlitImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBlitImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const BlitImageInfo2 & blitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBlitImage2KHR && "Function <vkCmdBlitImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); +# endif + + d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdResolveImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void 
CommandBuffer::resolveImage2KHR( const ResolveImageInfo2 * pResolveImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResolveImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdResolveImage2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const ResolveImageInfo2 & resolveImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdResolveImage2KHR && "Function <vkCmdResolveImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" ); +# endif + + d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_device_fault === + + // wrapper function for command vkGetDeviceFaultInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceFaultInfoEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFaultInfoEXT( DeviceFaultCountsEXT * pFaultCounts, + DeviceFaultInfoEXT * pFaultInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetDeviceFaultInfoEXT( + static_cast<VkDevice>( m_device ), reinterpret_cast<VkDeviceFaultCountsEXT *>( pFaultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( pFaultInfo ) ) ); + } + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquireWinrtDisplayNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireWinrtDisplayNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV( DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkAcquireWinrtDisplayNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( display ) ) ); + } +# else + // wrapper function for command vkAcquireWinrtDisplayNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireWinrtDisplayNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + PhysicalDevice::acquireWinrtDisplayNV( DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquireWinrtDisplayNV && "Function <vkAcquireWinrtDisplayNV> requires <VK_NV_acquire_winrt_display>" ); +# endif + + Result result = static_cast<Result>( 
d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" ); + + return detail::createResultValueType( result ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkGetWinrtDisplayNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetWinrtDisplayNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, + DisplayKHR * pDisplay, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkGetWinrtDisplayNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetWinrtDisplayNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetWinrtDisplayNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DisplayKHR>::type PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetWinrtDisplayNV && "Function <vkGetWinrtDisplayNV> requires <VK_NV_acquire_winrt_display>" ); +# endif + + DisplayKHR display; + Result result = static_cast<Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" ); + + return detail::createResultValueType( result, std::move( display ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkGetWinrtDisplayNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetWinrtDisplayNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DisplayKHR, Dispatch>>::type + PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetWinrtDisplayNV && "Function <vkGetWinrtDisplayNV> requires <VK_NV_acquire_winrt_display>" ); +# endif + + DisplayKHR display; + Result result = static_cast<Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique" ); + + return detail::createResultValueType( result, UniqueHandle<DisplayKHR, Dispatch>( display, detail::ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + // wrapper function for command vkCreateDirectFBSurfaceEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDirectFBSurfaceEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDirectFBSurfaceEXT( const DirectFBSurfaceCreateInfoEXT * pCreateInfo, + const AllocationCallbacks * pAllocator, + SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( static_cast<VkInstance>( m_instance ), + reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDirectFBSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDirectFBSurfaceEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<SurfaceKHR>::type Instance::createDirectFBSurfaceEXT( + const DirectFBSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDirectFBSurfaceEXT && "Function <vkCreateDirectFBSurfaceEXT> requires <VK_EXT_directfb_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance, + reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" ); + + return detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDirectFBSurfaceEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDirectFBSurfaceEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR, Dispatch>>::type Instance::createDirectFBSurfaceEXTUnique( + const DirectFBSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDirectFBSurfaceEXT && "Function <vkCreateDirectFBSurfaceEXT> requires <VK_EXT_directfb_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance, + reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* 
VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceDirectFBPresentationSupportEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDirectFBPresentationSupportEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, + IDirectFB * dfb, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Bool32>( + d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, dfb ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceDirectFBPresentationSupportEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceDirectFBPresentationSupportEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, + IDirectFB & dfb, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT && + "Function <vkGetPhysicalDeviceDirectFBPresentationSupportEXT> requires <VK_EXT_directfb_surface>" ); +# endif + + VkBool32 result = d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb ); + + return static_cast<Bool32>( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_vertex_input_dynamic_state === + + // wrapper function for command vkCmdSetVertexInputEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetVertexInputEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( uint32_t vertexBindingDescriptionCount, + const VertexInputBindingDescription2EXT * pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetVertexInputEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + vertexBindingDescriptionCount, + reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( pVertexBindingDescriptions ), + vertexAttributeDescriptionCount, + reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( pVertexAttributeDescriptions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetVertexInputEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetVertexInputEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( ArrayProxy<const VertexInputBindingDescription2EXT> const & vertexBindingDescriptions, + ArrayProxy<const VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetVertexInputEXT && "Function <vkCmdSetVertexInputEXT> requires <VK_EXT_shader_object> or <VK_EXT_vertex_input_dynamic_state>" ); +# endif + + d.vkCmdSetVertexInputEXT( m_commandBuffer, + vertexBindingDescriptions.size(), + reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ), + vertexAttributeDescriptions.size(), + reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( vertexAttributeDescriptions.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + + // wrapper function for command vkGetMemoryZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryZirconHandleFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryZirconHandleFUCHSIA( const MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetMemoryZirconHandleFUCHSIA( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryZirconHandleFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type + Device::getMemoryZirconHandleFUCHSIA( const MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryZirconHandleFUCHSIA && "Function <vkGetMemoryZirconHandleFUCHSIA> requires <VK_FUCHSIA_external_memory>" ); +# endif + + zx_handle_t zirconHandle; + Result result = static_cast<Result>( + d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" ); + + return detail::createResultValueType( result, std::move( zirconHandle ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetMemoryZirconHandlePropertiesFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryZirconHandlePropertiesFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryZirconHandlePropertiesFUCHSIA( ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + 
d.vkGetMemoryZirconHandlePropertiesFUCHSIA( static_cast<VkDevice>( m_device ), + static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), + zirconHandle, + reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( pMemoryZirconHandleProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryZirconHandlePropertiesFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryZirconHandlePropertiesFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<MemoryZirconHandlePropertiesFUCHSIA>::type + Device::getMemoryZirconHandlePropertiesFUCHSIA( ExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryZirconHandlePropertiesFUCHSIA && + "Function <vkGetMemoryZirconHandlePropertiesFUCHSIA> requires <VK_FUCHSIA_external_memory>" ); +# endif + + MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties; + Result result = static_cast<Result>( + d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device, + static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), + zirconHandle, + reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" ); + + return detail::createResultValueType( result, std::move( memoryZirconHandleProperties ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + + // wrapper function for command vkImportSemaphoreZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreZirconHandleFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreZirconHandleFUCHSIA( + const ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( pImportSemaphoreZirconHandleInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkImportSemaphoreZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkImportSemaphoreZirconHandleFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::importSemaphoreZirconHandleFUCHSIA( const ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkImportSemaphoreZirconHandleFUCHSIA && "Function <vkImportSemaphoreZirconHandleFUCHSIA> requires <VK_FUCHSIA_external_semaphore>" 
); +# endif + + Result result = static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA( + m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" ); + + return detail::createResultValueType( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetSemaphoreZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreZirconHandleFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreZirconHandleFUCHSIA( const SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetSemaphoreZirconHandleFUCHSIA( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSemaphoreZirconHandleFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreZirconHandleFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type + Device::getSemaphoreZirconHandleFUCHSIA( const SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSemaphoreZirconHandleFUCHSIA && "Function <vkGetSemaphoreZirconHandleFUCHSIA> requires <VK_FUCHSIA_external_semaphore>" ); +# endif + + zx_handle_t zirconHandle; + Result result = static_cast<Result>( + d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" ); + + return detail::createResultValueType( result, std::move( zirconHandle ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + // wrapper function for command vkCreateBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBufferCollectionFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBufferCollectionFUCHSIA( const BufferCollectionCreateInfoFUCHSIA * pCreateInfo, + const AllocationCallbacks * pAllocator, + BufferCollectionFUCHSIA * pCollection, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateBufferCollectionFUCHSIA( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + 
reinterpret_cast<VkBufferCollectionFUCHSIA *>( pCollection ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBufferCollectionFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<BufferCollectionFUCHSIA>::type Device::createBufferCollectionFUCHSIA( + const BufferCollectionCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateBufferCollectionFUCHSIA && "Function <vkCreateBufferCollectionFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" ); +# endif + + BufferCollectionFUCHSIA collection; + Result result = static_cast<Result>( d.vkCreateBufferCollectionFUCHSIA( m_device, + reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIA" ); + + return detail::createResultValueType( result, std::move( collection ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateBufferCollectionFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<BufferCollectionFUCHSIA, Dispatch>>::type + Device::createBufferCollectionFUCHSIAUnique( const BufferCollectionCreateInfoFUCHSIA & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateBufferCollectionFUCHSIA && "Function <vkCreateBufferCollectionFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" ); +# endif + + BufferCollectionFUCHSIA collection; + Result result = static_cast<Result>( d.vkCreateBufferCollectionFUCHSIA( m_device, + reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIAUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<BufferCollectionFUCHSIA, Dispatch>( collection, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkSetBufferCollectionImageConstraintsFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetBufferCollectionImageConstraintsFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setBufferCollectionImageConstraintsFUCHSIA( BufferCollectionFUCHSIA collection, + const 
ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkSetBufferCollectionImageConstraintsFUCHSIA( static_cast<VkDevice>( m_device ), + static_cast<VkBufferCollectionFUCHSIA>( collection ), + reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( pImageConstraintsInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetBufferCollectionImageConstraintsFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetBufferCollectionImageConstraintsFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setBufferCollectionImageConstraintsFUCHSIA( + BufferCollectionFUCHSIA collection, const ImageConstraintsInfoFUCHSIA & imageConstraintsInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetBufferCollectionImageConstraintsFUCHSIA && + "Function <vkSetBufferCollectionImageConstraintsFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" ); +# endif + + Result result = static_cast<Result>( d.vkSetBufferCollectionImageConstraintsFUCHSIA( + m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( &imageConstraintsInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionImageConstraintsFUCHSIA" ); + + return detail::createResultValueType( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkSetBufferCollectionBufferConstraintsFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetBufferCollectionBufferConstraintsFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setBufferCollectionBufferConstraintsFUCHSIA( + BufferCollectionFUCHSIA collection, const BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkSetBufferCollectionBufferConstraintsFUCHSIA( static_cast<VkDevice>( m_device ), + static_cast<VkBufferCollectionFUCHSIA>( collection ), + reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( pBufferConstraintsInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetBufferCollectionBufferConstraintsFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetBufferCollectionBufferConstraintsFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setBufferCollectionBufferConstraintsFUCHSIA( + BufferCollectionFUCHSIA collection, const BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetBufferCollectionBufferConstraintsFUCHSIA && + 
"Function <vkSetBufferCollectionBufferConstraintsFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" ); +# endif + + Result result = static_cast<Result>( d.vkSetBufferCollectionBufferConstraintsFUCHSIA( + m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( &bufferConstraintsInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionBufferConstraintsFUCHSIA" ); + + return detail::createResultValueType( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferCollectionFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( BufferCollectionFUCHSIA collection, + const AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBufferCollectionFUCHSIA( + static_cast<VkDevice>( m_device ), static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferCollectionFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( BufferCollectionFUCHSIA collection, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyBufferCollectionFUCHSIA && "Function <vkDestroyBufferCollectionFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" ); +# endif + + d.vkDestroyBufferCollectionFUCHSIA( + m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferCollectionFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( BufferCollectionFUCHSIA collection, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyBufferCollectionFUCHSIA( + static_cast<VkDevice>( m_device ), static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyBufferCollectionFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBufferCollectionFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( BufferCollectionFUCHSIA collection, Optional<const AllocationCallbacks> allocator, Dispatch const 
& d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyBufferCollectionFUCHSIA && "Function <vkDestroyBufferCollectionFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" ); +# endif + + d.vkDestroyBufferCollectionFUCHSIA( + m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetBufferCollectionPropertiesFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferCollectionPropertiesFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getBufferCollectionPropertiesFUCHSIA( BufferCollectionFUCHSIA collection, + BufferCollectionPropertiesFUCHSIA * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetBufferCollectionPropertiesFUCHSIA( static_cast<VkDevice>( m_device ), + static_cast<VkBufferCollectionFUCHSIA>( collection ), + reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( pProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetBufferCollectionPropertiesFUCHSIA, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferCollectionPropertiesFUCHSIA.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<BufferCollectionPropertiesFUCHSIA>::type + Device::getBufferCollectionPropertiesFUCHSIA( BufferCollectionFUCHSIA collection, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferCollectionPropertiesFUCHSIA && + "Function <vkGetBufferCollectionPropertiesFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" ); +# endif + + BufferCollectionPropertiesFUCHSIA properties; + Result result = static_cast<Result>( d.vkGetBufferCollectionPropertiesFUCHSIA( + m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( &properties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferCollectionPropertiesFUCHSIA" ); + + return detail::createResultValueType( result, std::move( properties ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + + // wrapper function for command vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( RenderPass renderpass, + Extent2D * pMaxWorkgroupSize, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( + static_cast<VkDevice>( m_device ), 
static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( pMaxWorkgroupSize ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<Extent2D>::type Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( RenderPass renderpass, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI && + "Function <vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI> requires <VK_HUAWEI_subpass_shading>" ); +# endif + + Extent2D maxWorkgroupSize; + Result result = static_cast<Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( + m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( &maxWorkgroupSize ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI" ); + + return detail::createResultValueType( result, std::move( maxWorkgroupSize ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSubpassShadingHUAWEI, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSubpassShadingHUAWEI.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSubpassShadingHUAWEI( static_cast<VkCommandBuffer>( m_commandBuffer ) ); + } + + //=== VK_HUAWEI_invocation_mask === + + // wrapper function for command vkCmdBindInvocationMaskHUAWEI, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindInvocationMaskHUAWEI.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( ImageView imageView, ImageLayout imageLayout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindInvocationMaskHUAWEI( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) ); + } + + //=== VK_NV_external_memory_rdma === + + // wrapper function for command vkGetMemoryRemoteAddressNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryRemoteAddressNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryRemoteAddressNV( const MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, + RemoteAddressNV * pAddress, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetMemoryRemoteAddressNV( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( pMemoryGetRemoteAddressInfo ), + reinterpret_cast<VkRemoteAddressNV *>( pAddress ) ) ); + } + 
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryRemoteAddressNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryRemoteAddressNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<RemoteAddressNV>::type + Device::getMemoryRemoteAddressNV( const MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryRemoteAddressNV && "Function <vkGetMemoryRemoteAddressNV> requires <VK_NV_external_memory_rdma>" ); +# endif + + RemoteAddressNV address; + Result result = static_cast<Result>( d.vkGetMemoryRemoteAddressNV( + m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( &memoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( &address ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" ); + + return detail::createResultValueType( result, std::move( address ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_pipeline_properties === + + // wrapper function for command vkGetPipelinePropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelinePropertiesEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelinePropertiesEXT( const PipelineInfoEXT * pPipelineInfo, + BaseOutStructure * pPipelineProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPipelinePropertiesEXT( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkPipelineInfoEXT *>( pPipelineInfo ), + reinterpret_cast<VkBaseOutStructure *>( pPipelineProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelinePropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelinePropertiesEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<BaseOutStructure>::type + Device::getPipelinePropertiesEXT( const PipelineInfoEXT & pipelineInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelinePropertiesEXT && "Function <vkGetPipelinePropertiesEXT> requires <VK_EXT_pipeline_properties>" ); +# endif + + BaseOutStructure pipelineProperties; + Result result = static_cast<Result>( d.vkGetPipelinePropertiesEXT( + m_device, reinterpret_cast<const VkPipelineInfoEXT *>( &pipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( &pipelineProperties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" ); + + return detail::createResultValueType( result, std::move( pipelineProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_extended_dynamic_state2 === + + // wrapper function for command vkCmdSetPatchControlPointsEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPatchControlPointsEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetPatchControlPointsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), patchControlPoints ); + } + + // wrapper function for command vkCmdSetRasterizerDiscardEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRasterizerDiscardEnableEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( Bool32 rasterizerDiscardEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRasterizerDiscardEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) ); + } + + // wrapper function for command vkCmdSetDepthBiasEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBiasEnableEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBiasEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) ); + } + + // wrapper function for command vkCmdSetLogicOpEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLogicOpEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( LogicOp logicOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLogicOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkLogicOp>( logicOp ) ); + } + + // wrapper function for command vkCmdSetPrimitiveRestartEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPrimitiveRestartEnableEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( Bool32 primitiveRestartEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetPrimitiveRestartEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) ); + } + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + // wrapper function for command vkCreateScreenSurfaceQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateScreenSurfaceQNX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createScreenSurfaceQNX( const ScreenSurfaceCreateInfoQNX * pCreateInfo, + const AllocationCallbacks * pAllocator, + SurfaceKHR * pSurface, + Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateScreenSurfaceQNX( static_cast<VkInstance>( m_instance ), + reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateScreenSurfaceQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateScreenSurfaceQNX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<SurfaceKHR>::type + Instance::createScreenSurfaceQNX( const ScreenSurfaceCreateInfoQNX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateScreenSurfaceQNX && "Function <vkCreateScreenSurfaceQNX> requires <VK_QNX_screen_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateScreenSurfaceQNX( m_instance, + reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" ); + + return detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateScreenSurfaceQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateScreenSurfaceQNX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR, Dispatch>>::type Instance::createScreenSurfaceQNXUnique( + const ScreenSurfaceCreateInfoQNX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateScreenSurfaceQNX && "Function <vkCreateScreenSurfaceQNX> requires <VK_QNX_screen_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateScreenSurfaceQNX( m_instance, + reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceScreenPresentationSupportQNX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceScreenPresentationSupportQNX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE Bool32 
PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, + struct _screen_window * window, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Bool32>( + d.vkGetPhysicalDeviceScreenPresentationSupportQNX( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, window ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceScreenPresentationSupportQNX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceScreenPresentationSupportQNX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, + struct _screen_window & window, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceScreenPresentationSupportQNX && + "Function <vkGetPhysicalDeviceScreenPresentationSupportQNX> requires <VK_QNX_screen_surface>" ); +# endif + + VkBool32 result = d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window ); + + return static_cast<Bool32>( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + + // wrapper function for command vkCmdSetColorWriteEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorWriteEnableEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::setColorWriteEnableEXT( uint32_t attachmentCount, const Bool32 * pColorWriteEnables, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetColorWriteEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), attachmentCount, reinterpret_cast<const VkBool32 *>( pColorWriteEnables ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetColorWriteEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorWriteEnableEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( ArrayProxy<const Bool32> const & colorWriteEnables, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetColorWriteEnableEXT && "Function <vkCmdSetColorWriteEnableEXT> requires <VK_EXT_color_write_enable>" ); +# endif + + d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, colorWriteEnables.size(), reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_ray_tracing_maintenance1 === + + // wrapper function for command vkCmdTraceRaysIndirect2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdTraceRaysIndirect2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirect2KHR( DeviceAddress 
indirectDeviceAddress, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdTraceRaysIndirect2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( indirectDeviceAddress ) ); + } + + //=== VK_EXT_multi_draw === + + // wrapper function for command vkCmdDrawMultiEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMultiEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( uint32_t drawCount, + const MultiDrawInfoEXT * pVertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawMultiEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + drawCount, + reinterpret_cast<const VkMultiDrawInfoEXT *>( pVertexInfo ), + instanceCount, + firstInstance, + stride ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDrawMultiEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMultiEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( StridedArrayProxy<const MultiDrawInfoEXT> const & vertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDrawMultiEXT && "Function <vkCmdDrawMultiEXT> requires <VK_EXT_multi_draw>" ); +# endif + + d.vkCmdDrawMultiEXT( m_commandBuffer, + vertexInfo.size(), + reinterpret_cast<const VkMultiDrawInfoEXT *>( vertexInfo.data() ), + instanceCount, + firstInstance, + vertexInfo.stride() ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDrawMultiIndexedEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMultiIndexedEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::drawMultiIndexedEXT( uint32_t drawCount, + const MultiDrawIndexedInfoEXT * pIndexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + const int32_t * pVertexOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawMultiIndexedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + drawCount, + reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( pIndexInfo ), + instanceCount, + firstInstance, + stride, + pVertexOffset ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDrawMultiIndexedEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawMultiIndexedEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::drawMultiIndexedEXT( StridedArrayProxy<const MultiDrawIndexedInfoEXT> const & indexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + Optional<const int32_t> vertexOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdDrawMultiIndexedEXT && "Function <vkCmdDrawMultiIndexedEXT> requires <VK_EXT_multi_draw>" );
+# endif
+
+    d.vkCmdDrawMultiIndexedEXT( m_commandBuffer,
+                                indexInfo.size(),
+                                reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( indexInfo.data() ),
+                                instanceCount,
+                                firstInstance,
+                                indexInfo.stride(),
+                                vertexOffset.get() );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_opacity_micromap ===
+
+  // wrapper function for command vkCreateMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMicromapEXT.html
+  template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createMicromapEXT( const MicromapCreateInfoEXT * pCreateInfo,
+                                                                           const AllocationCallbacks * pAllocator,
+                                                                           MicromapEXT * pMicromap,
+                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateMicromapEXT( static_cast<VkDevice>( m_device ),
+                                                       reinterpret_cast<const VkMicromapCreateInfoEXT *>( pCreateInfo ),
+                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                                       reinterpret_cast<VkMicromapEXT *>( pMicromap ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkCreateMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMicromapEXT.html
+  template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<MicromapEXT>::type
+    Device::createMicromapEXT( const MicromapCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCreateMicromapEXT && "Function <vkCreateMicromapEXT> requires <VK_EXT_opacity_micromap>" );
+# endif
+
+    MicromapEXT micromap;
+    Result result = static_cast<Result>( d.vkCreateMicromapEXT( m_device,
+                                                                reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ),
+                                                                reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ),
+                                                                reinterpret_cast<VkMicromapEXT *>( &micromap ) ) );
+    detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXT" );
+
+    return detail::createResultValueType( result, std::move( micromap ) );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // wrapper function for command vkCreateMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateMicromapEXT.html
+  template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<MicromapEXT, Dispatch>>::type
+    Device::createMicromapEXTUnique( const MicromapCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCreateMicromapEXT && "Function <vkCreateMicromapEXT> requires <VK_EXT_opacity_micromap>" );
+# endif
+
+    MicromapEXT micromap;
+    Result result = static_cast<Result>( d.vkCreateMicromapEXT( m_device,
+                                                                reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ),
+                                                                reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ),
+                                                                reinterpret_cast<VkMicromapEXT *>( &micromap ) ) );
+    detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXTUnique" );
+
+    return detail::createResultValueType( result,
+                                          UniqueHandle<MicromapEXT, Dispatch>( micromap, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkDestroyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyMicromapEXT.html
+  template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type>
+  VULKAN_HPP_INLINE void
+    Device::destroyMicromapEXT( MicromapEXT micromap, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyMicromapEXT(
+      static_cast<VkDevice>( m_device ), static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkDestroyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyMicromapEXT.html
+  template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type>
+  VULKAN_HPP_INLINE void
+    Device::destroyMicromapEXT( MicromapEXT micromap, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkDestroyMicromapEXT && "Function <vkDestroyMicromapEXT> requires <VK_EXT_opacity_micromap>" );
+# endif
+
+    d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // wrapper function for command vkDestroyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyMicromapEXT.html
+  template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type>
+  VULKAN_HPP_INLINE void Device::destroy( MicromapEXT micromap, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyMicromapEXT(
+      static_cast<VkDevice>( m_device ), static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  // wrapper function for command vkDestroyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyMicromapEXT.html
+  template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type>
+  VULKAN_HPP_INLINE void Device::destroy( MicromapEXT micromap, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkDestroyMicromapEXT && "Function <vkDestroyMicromapEXT> requires <VK_EXT_opacity_micromap>" );
+# endif
+
+    d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>(
allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBuildMicromapsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildMicromapsEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::buildMicromapsEXT( uint32_t infoCount, const MicromapBuildInfoEXT * pInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBuildMicromapsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBuildMicromapsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildMicromapsEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( ArrayProxy<const MicromapBuildInfoEXT> const & infos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBuildMicromapsEXT && "Function <vkCmdBuildMicromapsEXT> requires <VK_EXT_opacity_micromap>" ); +# endif + + d.vkCmdBuildMicromapsEXT( m_commandBuffer, infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBuildMicromapsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBuildMicromapsEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildMicromapsEXT( DeferredOperationKHR deferredOperation, + uint32_t infoCount, + const MicromapBuildInfoEXT * pInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkBuildMicromapsEXT( static_cast<VkDevice>( m_device ), + static_cast<VkDeferredOperationKHR>( deferredOperation ), + infoCount, + reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBuildMicromapsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBuildMicromapsEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildMicromapsEXT( DeferredOperationKHR deferredOperation, + ArrayProxy<const MicromapBuildInfoEXT> const & infos, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBuildMicromapsEXT && "Function <vkBuildMicromapsEXT> requires <VK_EXT_opacity_micromap>" ); +# endif + + Result result = static_cast<Result>( d.vkBuildMicromapsEXT( + m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) ) ); + detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT", + { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); + + 
return static_cast<Result>( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMicromapEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapEXT( DeferredOperationKHR deferredOperation, + const CopyMicromapInfoEXT * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCopyMicromapEXT( + static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMicromapEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapEXT( DeferredOperationKHR deferredOperation, + const CopyMicromapInfoEXT & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyMicromapEXT && "Function <vkCopyMicromapEXT> requires <VK_EXT_opacity_micromap>" ); +# endif + + Result result = static_cast<Result>( + d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT", { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); + + return static_cast<Result>( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyMicromapToMemoryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMicromapToMemoryEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapToMemoryEXT( DeferredOperationKHR deferredOperation, + const CopyMicromapToMemoryInfoEXT * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCopyMicromapToMemoryEXT( static_cast<VkDevice>( m_device ), + static_cast<VkDeferredOperationKHR>( deferredOperation ), + reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyMicromapToMemoryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMicromapToMemoryEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapToMemoryEXT( DeferredOperationKHR deferredOperation, + const CopyMicromapToMemoryInfoEXT & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyMicromapToMemoryEXT && "Function <vkCopyMicromapToMemoryEXT> requires <VK_EXT_opacity_micromap>" ); +# endif + + 
Result result = static_cast<Result>( d.vkCopyMicromapToMemoryEXT( + m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) ) ); + detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT", + { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); + + return static_cast<Result>( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCopyMemoryToMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToMicromapEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToMicromapEXT( DeferredOperationKHR deferredOperation, + const CopyMemoryToMicromapInfoEXT * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCopyMemoryToMicromapEXT( static_cast<VkDevice>( m_device ), + static_cast<VkDeferredOperationKHR>( deferredOperation ), + reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCopyMemoryToMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCopyMemoryToMicromapEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToMicromapEXT( DeferredOperationKHR deferredOperation, + const CopyMemoryToMicromapInfoEXT & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyMemoryToMicromapEXT && "Function <vkCopyMemoryToMicromapEXT> requires <VK_EXT_opacity_micromap>" ); +# endif + + Result result = static_cast<Result>( d.vkCopyMemoryToMicromapEXT( + m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) ) ); + detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT", + { Result::eSuccess, Result::eOperationDeferredKHR, Result::eOperationNotDeferredKHR } ); + + return static_cast<Result>( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkWriteMicromapsPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteMicromapsPropertiesEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeMicromapsPropertiesEXT( uint32_t micromapCount, + const MicromapEXT * pMicromaps, + QueryType queryType, + size_t dataSize, + void * pData, + size_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkWriteMicromapsPropertiesEXT( static_cast<VkDevice>( m_device ), + micromapCount, + reinterpret_cast<const VkMicromapEXT *>( pMicromaps ), + static_cast<VkQueryType>( queryType ), + dataSize, + pData, + stride ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkWriteMicromapsPropertiesEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteMicromapsPropertiesEXT.html + template <typename DataType, + typename DataTypeAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DataTypeAllocator::value_type, DataType>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type Device::writeMicromapsPropertiesEXT( + ArrayProxy<const MicromapEXT> const & micromaps, QueryType queryType, size_t dataSize, size_t stride, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWriteMicromapsPropertiesEXT && "Function <vkWriteMicromapsPropertiesEXT> requires <VK_EXT_opacity_micromap>" ); +# endif + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); + Result result = static_cast<Result>( d.vkWriteMicromapsPropertiesEXT( m_device, + micromaps.size(), + reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ), + static_cast<VkQueryType>( queryType ), + data.size() * sizeof( DataType ), + reinterpret_cast<void *>( data.data() ), + stride ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" ); + + return detail::createResultValueType( result, std::move( data ) ); + } + + // wrapper function for command vkWriteMicromapsPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkWriteMicromapsPropertiesEXT.html + template <typename DataType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type + Device::writeMicromapsPropertyEXT( ArrayProxy<const MicromapEXT> const & micromaps, QueryType queryType, size_t stride, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWriteMicromapsPropertiesEXT && "Function <vkWriteMicromapsPropertiesEXT> requires <VK_EXT_opacity_micromap>" ); +# endif + + DataType data; + Result result = static_cast<Result>( d.vkWriteMicromapsPropertiesEXT( m_device, + micromaps.size(), + reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ), + static_cast<VkQueryType>( queryType ), + sizeof( DataType ), + reinterpret_cast<void *>( &data ), + stride ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" ); + + return detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMicromapEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const CopyMicromapInfoEXT * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyMicromapEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command 
vkCmdCopyMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMicromapEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const CopyMicromapInfoEXT & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyMicromapEXT && "Function <vkCmdCopyMicromapEXT> requires <VK_EXT_opacity_micromap>" ); +# endif + + d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyMicromapToMemoryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMicromapToMemoryEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const CopyMicromapToMemoryInfoEXT * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyMicromapToMemoryEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyMicromapToMemoryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMicromapToMemoryEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const CopyMicromapToMemoryInfoEXT & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyMicromapToMemoryEXT && "Function <vkCmdCopyMicromapToMemoryEXT> requires <VK_EXT_opacity_micromap>" ); +# endif + + d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyMemoryToMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToMicromapEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const CopyMemoryToMicromapInfoEXT * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyMemoryToMicromapEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyMemoryToMicromapEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToMicromapEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const CopyMemoryToMicromapInfoEXT & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryToMicromapEXT && "Function <vkCmdCopyMemoryToMicromapEXT> requires <VK_EXT_opacity_micromap>" ); +# endif + + d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdWriteMicromapsPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteMicromapsPropertiesEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::writeMicromapsPropertiesEXT( uint32_t micromapCount, + const MicromapEXT * pMicromaps, + QueryType queryType, + QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteMicromapsPropertiesEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + micromapCount, + reinterpret_cast<const VkMicromapEXT *>( pMicromaps ), + static_cast<VkQueryType>( queryType ), + static_cast<VkQueryPool>( queryPool ), + firstQuery ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdWriteMicromapsPropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteMicromapsPropertiesEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::writeMicromapsPropertiesEXT( ArrayProxy<const MicromapEXT> const & micromaps, + QueryType queryType, + QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdWriteMicromapsPropertiesEXT && "Function <vkCmdWriteMicromapsPropertiesEXT> requires <VK_EXT_opacity_micromap>" ); +# endif + + d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer, + micromaps.size(), + reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ), + static_cast<VkQueryType>( queryType ), + static_cast<VkQueryPool>( queryPool ), + firstQuery ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceMicromapCompatibilityEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMicromapCompatibilityEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getMicromapCompatibilityEXT( const MicromapVersionInfoEXT * pVersionInfo, + AccelerationStructureCompatibilityKHR * pCompatibility, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceMicromapCompatibilityEXT( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkMicromapVersionInfoEXT *>( pVersionInfo ), + reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceMicromapCompatibilityEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMicromapCompatibilityEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
AccelerationStructureCompatibilityKHR + Device::getMicromapCompatibilityEXT( const MicromapVersionInfoEXT & versionInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceMicromapCompatibilityEXT && "Function <vkGetDeviceMicromapCompatibilityEXT> requires <VK_EXT_opacity_micromap>" ); +# endif + + AccelerationStructureCompatibilityKHR compatibility; + d.vkGetDeviceMicromapCompatibilityEXT( m_device, + reinterpret_cast<const VkMicromapVersionInfoEXT *>( &versionInfo ), + reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) ); + + return compatibility; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetMicromapBuildSizesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMicromapBuildSizesEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getMicromapBuildSizesEXT( AccelerationStructureBuildTypeKHR buildType, + const MicromapBuildInfoEXT * pBuildInfo, + MicromapBuildSizesInfoEXT * pSizeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetMicromapBuildSizesEXT( static_cast<VkDevice>( m_device ), + static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), + reinterpret_cast<const VkMicromapBuildInfoEXT *>( pBuildInfo ), + reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( pSizeInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMicromapBuildSizesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMicromapBuildSizesEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE MicromapBuildSizesInfoEXT Device::getMicromapBuildSizesEXT( AccelerationStructureBuildTypeKHR buildType, + const MicromapBuildInfoEXT & buildInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMicromapBuildSizesEXT && "Function <vkGetMicromapBuildSizesEXT> requires <VK_EXT_opacity_micromap>" ); +# endif + + MicromapBuildSizesInfoEXT sizeInfo; + d.vkGetMicromapBuildSizesEXT( m_device, + static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), + reinterpret_cast<const VkMicromapBuildInfoEXT *>( &buildInfo ), + reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( &sizeInfo ) ); + + return sizeInfo; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_HUAWEI_cluster_culling_shader === + + // wrapper function for command vkCmdDrawClusterHUAWEI, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawClusterHUAWEI.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawClusterHUAWEI( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); + } + + // wrapper function for command vkCmdDrawClusterIndirectHUAWEI, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawClusterIndirectHUAWEI.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::drawClusterIndirectHUAWEI( Buffer buffer, DeviceSize offset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawClusterIndirectHUAWEI( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) ); + } + + //=== VK_EXT_pageable_device_local_memory === + + // wrapper function for command vkSetDeviceMemoryPriorityEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetDeviceMemoryPriorityEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::setMemoryPriorityEXT( DeviceMemory memory, float priority, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkSetDeviceMemoryPriorityEXT( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( memory ), priority ); + } + + //=== VK_KHR_maintenance4 === + + // wrapper function for command vkGetDeviceBufferMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirementsKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getBufferMemoryRequirementsKHR( const DeviceBufferMemoryRequirements * pInfo, + MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceBufferMemoryRequirementsKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), + reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceBufferMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirementsKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE MemoryRequirements2 Device::getBufferMemoryRequirementsKHR( const DeviceBufferMemoryRequirements & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirementsKHR && + "Function <vkGetDeviceBufferMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); +# endif + + MemoryRequirements2 memoryRequirements; + d.vkGetDeviceBufferMemoryRequirementsKHR( + m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetDeviceBufferMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirementsKHR.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirementsKHR( const DeviceBufferMemoryRequirements & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirementsKHR && + "Function <vkGetDeviceBufferMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + MemoryRequirements2 & memoryRequirements = structureChain.template get<MemoryRequirements2>(); + d.vkGetDeviceBufferMemoryRequirementsKHR( + m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceImageMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirementsKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getImageMemoryRequirementsKHR( const DeviceImageMemoryRequirements * pInfo, + MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceImageMemoryRequirementsKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), + reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceImageMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirementsKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE MemoryRequirements2 Device::getImageMemoryRequirementsKHR( const DeviceImageMemoryRequirements & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirementsKHR && + "Function <vkGetDeviceImageMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); +# endif + + MemoryRequirements2 memoryRequirements; + d.vkGetDeviceImageMemoryRequirementsKHR( + m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetDeviceImageMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirementsKHR.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirementsKHR( const DeviceImageMemoryRequirements & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirementsKHR && + "Function <vkGetDeviceImageMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + MemoryRequirements2 & memoryRequirements = structureChain.template get<MemoryRequirements2>(); + d.vkGetDeviceImageMemoryRequirementsKHR( + m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceImageSparseMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirementsKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirementsKHR( const DeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceImageSparseMemoryRequirementsKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), + pSparseMemoryRequirementCount, + reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceImageSparseMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirementsKHR.html + template < + typename SparseImageMemoryRequirements2Allocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, SparseImageMemoryRequirements2>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> + Device::getImageSparseMemoryRequirementsKHR( const DeviceImageMemoryRequirements & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirementsKHR && + "Function <vkGetDeviceImageSparseMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); +# endif + + std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetDeviceImageSparseMemoryRequirementsKHR( + m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, + reinterpret_cast<const 
VkDeviceImageMemoryRequirements *>( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + // wrapper function for command vkGetDeviceImageSparseMemoryRequirementsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirementsKHR.html + template < + typename SparseImageMemoryRequirements2Allocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, SparseImageMemoryRequirements2>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> + Device::getImageSparseMemoryRequirementsKHR( const DeviceImageMemoryRequirements & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirementsKHR && + "Function <vkGetDeviceImageSparseMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" ); +# endif + + std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( sparseImageMemoryRequirements2Allocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetDeviceImageSparseMemoryRequirementsKHR( + m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, + reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VALVE_descriptor_set_host_mapping === + + // wrapper function for command vkGetDescriptorSetLayoutHostMappingInfoVALVE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutHostMappingInfoVALVE.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutHostMappingInfoVALVE( const DescriptorSetBindingReferenceVALVE * pBindingReference, + DescriptorSetLayoutHostMappingInfoVALVE * pHostMapping, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( pBindingReference ), + reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( pHostMapping 
) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDescriptorSetLayoutHostMappingInfoVALVE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutHostMappingInfoVALVE.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DescriptorSetLayoutHostMappingInfoVALVE Device::getDescriptorSetLayoutHostMappingInfoVALVE( + const DescriptorSetBindingReferenceVALVE & bindingReference, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutHostMappingInfoVALVE && + "Function <vkGetDescriptorSetLayoutHostMappingInfoVALVE> requires <VK_VALVE_descriptor_set_host_mapping>" ); +# endif + + DescriptorSetLayoutHostMappingInfoVALVE hostMapping; + d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device, + reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( &bindingReference ), + reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( &hostMapping ) ); + + return hostMapping; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDescriptorSetHostMappingVALVE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetHostMappingVALVE.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getDescriptorSetHostMappingVALVE( DescriptorSet descriptorSet, void ** ppData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDescriptorSetHostMappingVALVE( static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSet>( descriptorSet ), ppData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDescriptorSetHostMappingVALVE, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetHostMappingVALVE.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::getDescriptorSetHostMappingVALVE( DescriptorSet descriptorSet, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetHostMappingVALVE && + "Function <vkGetDescriptorSetHostMappingVALVE> requires <VK_VALVE_descriptor_set_host_mapping>" ); +# endif + + void * pData; + d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), &pData ); + + return pData; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_copy_memory_indirect === + + // wrapper function for command vkCmdCopyMemoryIndirectNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryIndirectNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::copyMemoryIndirectNV( DeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyMemoryIndirectNV( static_cast<VkCommandBuffer>( 
m_commandBuffer ), static_cast<VkDeviceAddress>( copyBufferAddress ), copyCount, stride ); + } + + // wrapper function for command vkCmdCopyMemoryToImageIndirectNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToImageIndirectNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectNV( DeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + Image dstImage, + ImageLayout dstImageLayout, + const ImageSubresourceLayers * pImageSubresources, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyMemoryToImageIndirectNV( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkDeviceAddress>( copyBufferAddress ), + copyCount, + stride, + static_cast<VkImage>( dstImage ), + static_cast<VkImageLayout>( dstImageLayout ), + reinterpret_cast<const VkImageSubresourceLayers *>( pImageSubresources ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyMemoryToImageIndirectNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToImageIndirectNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectNV( DeviceAddress copyBufferAddress, + uint32_t stride, + Image dstImage, + ImageLayout dstImageLayout, + ArrayProxy<const ImageSubresourceLayers> const & imageSubresources, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryToImageIndirectNV && "Function <vkCmdCopyMemoryToImageIndirectNV> requires <VK_NV_copy_memory_indirect>" ); +# endif + + d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer, + static_cast<VkDeviceAddress>( copyBufferAddress ), + imageSubresources.size(), + stride, + static_cast<VkImage>( dstImage ), + static_cast<VkImageLayout>( dstImageLayout ), + reinterpret_cast<const VkImageSubresourceLayers *>( imageSubresources.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_memory_decompression === + + // wrapper function for command vkCmdDecompressMemoryNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecompressMemoryNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryNV( uint32_t decompressRegionCount, + const DecompressMemoryRegionNV * pDecompressMemoryRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDecompressMemoryNV( static_cast<VkCommandBuffer>( m_commandBuffer ), + decompressRegionCount, + reinterpret_cast<const VkDecompressMemoryRegionNV *>( pDecompressMemoryRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDecompressMemoryNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecompressMemoryNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryNV( ArrayProxy<const DecompressMemoryRegionNV> const & decompressMemoryRegions, + 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDecompressMemoryNV && "Function <vkCmdDecompressMemoryNV> requires <VK_NV_memory_decompression>" ); +# endif + + d.vkCmdDecompressMemoryNV( + m_commandBuffer, decompressMemoryRegions.size(), reinterpret_cast<const VkDecompressMemoryRegionNV *>( decompressMemoryRegions.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDecompressMemoryIndirectCountNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecompressMemoryIndirectCountNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryIndirectCountNV( DeviceAddress indirectCommandsAddress, + DeviceAddress indirectCommandsCountAddress, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDecompressMemoryIndirectCountNV( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkDeviceAddress>( indirectCommandsAddress ), + static_cast<VkDeviceAddress>( indirectCommandsCountAddress ), + stride ); + } + + //=== VK_NV_device_generated_commands_compute === + + // wrapper function for command vkGetPipelineIndirectMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineIndirectMemoryRequirementsNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getPipelineIndirectMemoryRequirementsNV( const ComputePipelineCreateInfo * pCreateInfo, + MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPipelineIndirectMemoryRequirementsNV( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfo ), + reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineIndirectMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineIndirectMemoryRequirementsNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE MemoryRequirements2 Device::getPipelineIndirectMemoryRequirementsNV( const ComputePipelineCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineIndirectMemoryRequirementsNV && + "Function <vkGetPipelineIndirectMemoryRequirementsNV> requires <VK_NV_device_generated_commands_compute>" ); +# endif + + MemoryRequirements2 memoryRequirements; + d.vkGetPipelineIndirectMemoryRequirementsNV( + m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetPipelineIndirectMemoryRequirementsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineIndirectMemoryRequirementsNV.html 
+ template <typename X, typename Y, typename... Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> + Device::getPipelineIndirectMemoryRequirementsNV( const ComputePipelineCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineIndirectMemoryRequirementsNV && + "Function <vkGetPipelineIndirectMemoryRequirementsNV> requires <VK_NV_device_generated_commands_compute>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + MemoryRequirements2 & memoryRequirements = structureChain.template get<MemoryRequirements2>(); + d.vkGetPipelineIndirectMemoryRequirementsNV( + m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdUpdatePipelineIndirectBufferNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdUpdatePipelineIndirectBufferNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::updatePipelineIndirectBufferNV( PipelineBindPoint pipelineBindPoint, Pipeline pipeline, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdUpdatePipelineIndirectBufferNV( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) ); + } + + // wrapper function for command vkGetPipelineIndirectDeviceAddressNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineIndirectDeviceAddressNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE DeviceAddress Device::getPipelineIndirectAddressNV( const PipelineIndirectDeviceAddressInfoNV * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<DeviceAddress>( + d.vkGetPipelineIndirectDeviceAddressNV( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineIndirectDeviceAddressNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineIndirectDeviceAddressNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE DeviceAddress Device::getPipelineIndirectAddressNV( const PipelineIndirectDeviceAddressInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineIndirectDeviceAddressNV && + "Function <vkGetPipelineIndirectDeviceAddressNV> requires <VK_NV_device_generated_commands_compute>" ); +# endif + + VkDeviceAddress result = d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( &info ) ); + + return 
static_cast<DeviceAddress>( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_OHOS ) + //=== VK_OHOS_external_memory === + + // wrapper function for command vkGetNativeBufferPropertiesOHOS, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetNativeBufferPropertiesOHOS.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getNativeBufferPropertiesOHOS( const struct OH_NativeBuffer * buffer, + NativeBufferPropertiesOHOS * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkGetNativeBufferPropertiesOHOS( static_cast<VkDevice>( m_device ), buffer, reinterpret_cast<VkNativeBufferPropertiesOHOS *>( pProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetNativeBufferPropertiesOHOS, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetNativeBufferPropertiesOHOS.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<NativeBufferPropertiesOHOS>::type + Device::getNativeBufferPropertiesOHOS( const struct OH_NativeBuffer & buffer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetNativeBufferPropertiesOHOS && "Function <vkGetNativeBufferPropertiesOHOS> requires <VK_OHOS_external_memory>" ); +# endif + + NativeBufferPropertiesOHOS properties; + Result result = + static_cast<Result>( d.vkGetNativeBufferPropertiesOHOS( m_device, &buffer, reinterpret_cast<VkNativeBufferPropertiesOHOS *>( &properties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getNativeBufferPropertiesOHOS" ); + + return detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetNativeBufferPropertiesOHOS, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetNativeBufferPropertiesOHOS.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type + Device::getNativeBufferPropertiesOHOS( const struct OH_NativeBuffer & buffer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetNativeBufferPropertiesOHOS && "Function <vkGetNativeBufferPropertiesOHOS> requires <VK_OHOS_external_memory>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + NativeBufferPropertiesOHOS & properties = structureChain.template get<NativeBufferPropertiesOHOS>(); + Result result = + static_cast<Result>( d.vkGetNativeBufferPropertiesOHOS( m_device, &buffer, reinterpret_cast<VkNativeBufferPropertiesOHOS *>( &properties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getNativeBufferPropertiesOHOS" ); + + return detail::createResultValueType( result, std::move( structureChain ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetMemoryNativeBufferOHOS, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryNativeBufferOHOS.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryNativeBufferOHOS( const MemoryGetNativeBufferInfoOHOS * pInfo, + struct OH_NativeBuffer ** pBuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkGetMemoryNativeBufferOHOS( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetNativeBufferInfoOHOS *>( pInfo ), pBuffer ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryNativeBufferOHOS, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryNativeBufferOHOS.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<struct OH_NativeBuffer *>::type + Device::getMemoryNativeBufferOHOS( const MemoryGetNativeBufferInfoOHOS & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryNativeBufferOHOS && "Function <vkGetMemoryNativeBufferOHOS> requires <VK_OHOS_external_memory>" ); +# endif + + struct OH_NativeBuffer * buffer; + Result result = + static_cast<Result>( d.vkGetMemoryNativeBufferOHOS( m_device, reinterpret_cast<const VkMemoryGetNativeBufferInfoOHOS *>( &info ), &buffer ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryNativeBufferOHOS" ); + + return detail::createResultValueType( result, std::move( buffer ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_OHOS*/ + + //=== VK_EXT_extended_dynamic_state3 === + + // wrapper function for command vkCmdSetDepthClampEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClampEnableEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( Bool32 depthClampEnable, Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthClampEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClampEnable ) ); + } + + // wrapper function for command vkCmdSetPolygonModeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPolygonModeEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( PolygonMode polygonMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetPolygonModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPolygonMode>( polygonMode ) ); + } + + // wrapper function for command vkCmdSetRasterizationSamplesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRasterizationSamplesEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( SampleCountFlagBits rasterizationSamples, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRasterizationSamplesEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkSampleCountFlagBits>( rasterizationSamples ) ); + } + + // wrapper function for command vkCmdSetSampleMaskEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleMaskEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::setSampleMaskEXT( SampleCountFlagBits samples, const SampleMask * pSampleMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetSampleMaskEXT( + static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( pSampleMask ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetSampleMaskEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleMaskEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( SampleCountFlagBits samples, + ArrayProxy<const SampleMask> const & sampleMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetSampleMaskEXT && "Function <vkCmdSetSampleMaskEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( sampleMask.size() == ( static_cast<uint32_t>( samples ) + 31 ) / 32 ); +# else + if ( sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32 ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setSampleMaskEXT: sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( sampleMask.data() ) ); + } +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetAlphaToCoverageEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetAlphaToCoverageEnableEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( Bool32 alphaToCoverageEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetAlphaToCoverageEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToCoverageEnable ) ); + } + + // wrapper function for command vkCmdSetAlphaToOneEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetAlphaToOneEnableEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( Bool32 alphaToOneEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetAlphaToOneEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToOneEnable ) ); + } + + // wrapper function for command vkCmdSetLogicOpEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLogicOpEnableEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( Bool32 logicOpEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLogicOpEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( logicOpEnable ) ); + } + + // wrapper function for command vkCmdSetColorBlendEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendEnableEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const Bool32 * pColorBlendEnables, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetColorBlendEnableEXT( + static_cast<VkCommandBuffer>( m_commandBuffer ), firstAttachment, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorBlendEnables ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetColorBlendEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendEnableEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment, + ArrayProxy<const Bool32> const & colorBlendEnables, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetColorBlendEnableEXT && + "Function <vkCmdSetColorBlendEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); +# endif + + d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, colorBlendEnables.size(), reinterpret_cast<const VkBool32 *>( 
colorBlendEnables.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetColorBlendEquationEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendEquationEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const ColorBlendEquationEXT * pColorBlendEquations, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetColorBlendEquationEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + firstAttachment, + attachmentCount, + reinterpret_cast<const VkColorBlendEquationEXT *>( pColorBlendEquations ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetColorBlendEquationEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendEquationEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment, + ArrayProxy<const ColorBlendEquationEXT> const & colorBlendEquations, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetColorBlendEquationEXT && + "Function <vkCmdSetColorBlendEquationEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); +# endif + + d.vkCmdSetColorBlendEquationEXT( + m_commandBuffer, firstAttachment, colorBlendEquations.size(), reinterpret_cast<const VkColorBlendEquationEXT *>( colorBlendEquations.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetColorWriteMaskEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorWriteMaskEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const ColorComponentFlags * pColorWriteMasks, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetColorWriteMaskEXT( + static_cast<VkCommandBuffer>( m_commandBuffer ), firstAttachment, attachmentCount, reinterpret_cast<const VkColorComponentFlags *>( pColorWriteMasks ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetColorWriteMaskEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorWriteMaskEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment, + ArrayProxy<const ColorComponentFlags> const & colorWriteMasks, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetColorWriteMaskEXT && + "Function <vkCmdSetColorWriteMaskEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); +# endif + + d.vkCmdSetColorWriteMaskEXT( + m_commandBuffer, firstAttachment, 
colorWriteMasks.size(), reinterpret_cast<const VkColorComponentFlags *>( colorWriteMasks.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetTessellationDomainOriginEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetTessellationDomainOriginEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setTessellationDomainOriginEXT( TessellationDomainOrigin domainOrigin, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetTessellationDomainOriginEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkTessellationDomainOrigin>( domainOrigin ) ); + } + + // wrapper function for command vkCmdSetRasterizationStreamEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRasterizationStreamEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRasterizationStreamEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), rasterizationStream ); + } + + // wrapper function for command vkCmdSetConservativeRasterizationModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetConservativeRasterizationModeEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setConservativeRasterizationModeEXT( ConservativeRasterizationModeEXT conservativeRasterizationMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetConservativeRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) ); + } + + // wrapper function for command vkCmdSetExtraPrimitiveOverestimationSizeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetExtraPrimitiveOverestimationSizeEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), extraPrimitiveOverestimationSize ); + } + + // wrapper function for command vkCmdSetDepthClipEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClipEnableEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( Bool32 depthClipEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthClipEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClipEnable ) ); + } + + // wrapper function for command vkCmdSetSampleLocationsEnableEXT, see + 
// https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetSampleLocationsEnableEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( Bool32 sampleLocationsEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetSampleLocationsEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( sampleLocationsEnable ) ); + } + + // wrapper function for command vkCmdSetColorBlendAdvancedEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendAdvancedEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const ColorBlendAdvancedEXT * pColorBlendAdvanced, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetColorBlendAdvancedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + firstAttachment, + attachmentCount, + reinterpret_cast<const VkColorBlendAdvancedEXT *>( pColorBlendAdvanced ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetColorBlendAdvancedEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetColorBlendAdvancedEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment, + ArrayProxy<const ColorBlendAdvancedEXT> const & colorBlendAdvanced, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetColorBlendAdvancedEXT && + "Function <vkCmdSetColorBlendAdvancedEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); +# endif + + d.vkCmdSetColorBlendAdvancedEXT( + m_commandBuffer, firstAttachment, colorBlendAdvanced.size(), reinterpret_cast<const VkColorBlendAdvancedEXT *>( colorBlendAdvanced.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetProvokingVertexModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetProvokingVertexModeEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setProvokingVertexModeEXT( ProvokingVertexModeEXT provokingVertexMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetProvokingVertexModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) ); + } + + // wrapper function for command vkCmdSetLineRasterizationModeEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineRasterizationModeEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setLineRasterizationModeEXT( LineRasterizationModeEXT lineRasterizationMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLineRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) ); + } + + // wrapper function for command vkCmdSetLineStippleEnableEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineStippleEnableEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( Bool32 stippledLineEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLineStippleEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stippledLineEnable ) ); + } + + // wrapper function for command vkCmdSetDepthClipNegativeOneToOneEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClipNegativeOneToOneEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( Bool32 negativeOneToOne, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthClipNegativeOneToOneEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( negativeOneToOne ) ); + } + + // wrapper function for command vkCmdSetViewportWScalingEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWScalingEnableNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingEnableNV( Bool32 viewportWScalingEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportWScalingEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( viewportWScalingEnable ) ); + } + + // wrapper function for command vkCmdSetViewportSwizzleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportSwizzleNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport, + uint32_t viewportCount, + const ViewportSwizzleNV * pViewportSwizzles, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportSwizzleNV( + static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast<const VkViewportSwizzleNV *>( pViewportSwizzles ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetViewportSwizzleNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportSwizzleNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport, + ArrayProxy<const ViewportSwizzleNV> const & viewportSwizzles, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetViewportSwizzleNV 
&& + "Function <vkCmdSetViewportSwizzleNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); +# endif + + d.vkCmdSetViewportSwizzleNV( + m_commandBuffer, firstViewport, viewportSwizzles.size(), reinterpret_cast<const VkViewportSwizzleNV *>( viewportSwizzles.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetCoverageToColorEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageToColorEnableNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( Bool32 coverageToColorEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCoverageToColorEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( coverageToColorEnable ) ); + } + + // wrapper function for command vkCmdSetCoverageToColorLocationNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageToColorLocationNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCoverageToColorLocationNV( static_cast<VkCommandBuffer>( m_commandBuffer ), coverageToColorLocation ); + } + + // wrapper function for command vkCmdSetCoverageModulationModeNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageModulationModeNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationModeNV( CoverageModulationModeNV coverageModulationMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCoverageModulationModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) ); + } + + // wrapper function for command vkCmdSetCoverageModulationTableEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageModulationTableEnableNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableEnableNV( Bool32 coverageModulationTableEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCoverageModulationTableEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( coverageModulationTableEnable ) ); + } + + // wrapper function for command vkCmdSetCoverageModulationTableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageModulationTableNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( uint32_t coverageModulationTableCount, + const float * pCoverageModulationTable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
d.vkCmdSetCoverageModulationTableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), coverageModulationTableCount, pCoverageModulationTable ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetCoverageModulationTableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageModulationTableNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( ArrayProxy<const float> const & coverageModulationTable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetCoverageModulationTableNV && + "Function <vkCmdSetCoverageModulationTableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" ); +# endif + + d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTable.size(), coverageModulationTable.data() ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetShadingRateImageEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetShadingRateImageEnableNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setShadingRateImageEnableNV( Bool32 shadingRateImageEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetShadingRateImageEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( shadingRateImageEnable ) ); + } + + // wrapper function for command vkCmdSetRepresentativeFragmentTestEnableNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRepresentativeFragmentTestEnableNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setRepresentativeFragmentTestEnableNV( Bool32 representativeFragmentTestEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRepresentativeFragmentTestEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( representativeFragmentTestEnable ) ); + } + + // wrapper function for command vkCmdSetCoverageReductionModeNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageReductionModeNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setCoverageReductionModeNV( CoverageReductionModeNV coverageReductionMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCoverageReductionModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) ); + } + + //=== VK_ARM_tensors === + + // wrapper function for command vkCreateTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateTensorARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createTensorARM( const TensorCreateInfoARM * 
pCreateInfo, + const AllocationCallbacks * pAllocator, + TensorARM * pTensor, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateTensorARM( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkTensorCreateInfoARM *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkTensorARM *>( pTensor ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateTensorARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<TensorARM>::type + Device::createTensorARM( const TensorCreateInfoARM & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateTensorARM && "Function <vkCreateTensorARM> requires <VK_ARM_tensors>" ); +# endif + + TensorARM tensor; + Result result = static_cast<Result>( d.vkCreateTensorARM( m_device, + reinterpret_cast<const VkTensorCreateInfoARM *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkTensorARM *>( &tensor ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createTensorARM" ); + + return detail::createResultValueType( result, std::move( tensor ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateTensorARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<TensorARM, Dispatch>>::type + Device::createTensorARMUnique( const TensorCreateInfoARM & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateTensorARM && "Function <vkCreateTensorARM> requires <VK_ARM_tensors>" ); +# endif + + TensorARM tensor; + Result result = static_cast<Result>( d.vkCreateTensorARM( m_device, + reinterpret_cast<const VkTensorCreateInfoARM *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkTensorARM *>( &tensor ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createTensorARMUnique" ); + + return detail::createResultValueType( result, UniqueHandle<TensorARM, Dispatch>( tensor, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyTensorARM( TensorARM tensor, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
d.vkDestroyTensorARM( + static_cast<VkDevice>( m_device ), static_cast<VkTensorARM>( tensor ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyTensorARM( TensorARM tensor, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyTensorARM && "Function <vkDestroyTensorARM> requires <VK_ARM_tensors>" ); +# endif + + d.vkDestroyTensorARM( m_device, static_cast<VkTensorARM>( tensor ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( TensorARM tensor, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyTensorARM( + static_cast<VkDevice>( m_device ), static_cast<VkTensorARM>( tensor ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( TensorARM tensor, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyTensorARM && "Function <vkDestroyTensorARM> requires <VK_ARM_tensors>" ); +# endif + + d.vkDestroyTensorARM( m_device, static_cast<VkTensorARM>( tensor ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateTensorViewARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createTensorViewARM( const TensorViewCreateInfoARM * pCreateInfo, + const AllocationCallbacks * pAllocator, + TensorViewARM * pView, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateTensorViewARM( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkTensorViewCreateInfoARM *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkTensorViewARM *>( pView ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateTensorViewARM, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateTensorViewARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<TensorViewARM>::type + Device::createTensorViewARM( const TensorViewCreateInfoARM & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateTensorViewARM && "Function <vkCreateTensorViewARM> requires <VK_ARM_tensors>" ); +# endif + + TensorViewARM view; + Result result = static_cast<Result>( d.vkCreateTensorViewARM( m_device, + reinterpret_cast<const VkTensorViewCreateInfoARM *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkTensorViewARM *>( &view ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createTensorViewARM" ); + + return detail::createResultValueType( result, std::move( view ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateTensorViewARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<TensorViewARM, Dispatch>>::type + Device::createTensorViewARMUnique( const TensorViewCreateInfoARM & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateTensorViewARM && "Function <vkCreateTensorViewARM> requires <VK_ARM_tensors>" ); +# endif + + TensorViewARM view; + Result result = static_cast<Result>( d.vkCreateTensorViewARM( m_device, + reinterpret_cast<const VkTensorViewCreateInfoARM *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkTensorViewARM *>( &view ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createTensorViewARMUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<TensorViewARM, Dispatch>( view, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorViewARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyTensorViewARM( TensorViewARM tensorView, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyTensorViewARM( + static_cast<VkDevice>( m_device ), static_cast<VkTensorViewARM>( tensorView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorViewARM.html + template <typename Dispatch, typename 
std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyTensorViewARM( TensorViewARM tensorView, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyTensorViewARM && "Function <vkDestroyTensorViewARM> requires <VK_ARM_tensors>" ); +# endif + + d.vkDestroyTensorViewARM( m_device, static_cast<VkTensorViewARM>( tensorView ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorViewARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( TensorViewARM tensorView, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyTensorViewARM( + static_cast<VkDevice>( m_device ), static_cast<VkTensorViewARM>( tensorView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorViewARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( TensorViewARM tensorView, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyTensorViewARM && "Function <vkDestroyTensorViewARM> requires <VK_ARM_tensors>" ); +# endif + + d.vkDestroyTensorViewARM( m_device, static_cast<VkTensorViewARM>( tensorView ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetTensorMemoryRequirementsARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorMemoryRequirementsARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getTensorMemoryRequirementsARM( const TensorMemoryRequirementsInfoARM * pInfo, + MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetTensorMemoryRequirementsARM( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkTensorMemoryRequirementsInfoARM *>( pInfo ), + reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetTensorMemoryRequirementsARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorMemoryRequirementsARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE MemoryRequirements2 Device::getTensorMemoryRequirementsARM( const TensorMemoryRequirementsInfoARM & info, + Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetTensorMemoryRequirementsARM && "Function <vkGetTensorMemoryRequirementsARM> requires <VK_ARM_tensors>" ); +# endif + + MemoryRequirements2 memoryRequirements; + d.vkGetTensorMemoryRequirementsARM( + m_device, reinterpret_cast<const VkTensorMemoryRequirementsInfoARM *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetTensorMemoryRequirementsARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorMemoryRequirementsARM.html + template <typename X, typename Y, typename... Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getTensorMemoryRequirementsARM( const TensorMemoryRequirementsInfoARM & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetTensorMemoryRequirementsARM && "Function <vkGetTensorMemoryRequirementsARM> requires <VK_ARM_tensors>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + MemoryRequirements2 & memoryRequirements = structureChain.template get<MemoryRequirements2>(); + d.vkGetTensorMemoryRequirementsARM( + m_device, reinterpret_cast<const VkTensorMemoryRequirementsInfoARM *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBindTensorMemoryARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindTensorMemoryARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindTensorMemoryARM( uint32_t bindInfoCount, + const BindTensorMemoryInfoARM * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkBindTensorMemoryARM( static_cast<VkDevice>( m_device ), bindInfoCount, reinterpret_cast<const VkBindTensorMemoryInfoARM *>( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindTensorMemoryARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindTensorMemoryARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::bindTensorMemoryARM( ArrayProxy<const BindTensorMemoryInfoARM> const & bindInfos, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindTensorMemoryARM && "Function <vkBindTensorMemoryARM> requires <VK_ARM_tensors>" ); +# endif + + Result result = + static_cast<Result>( d.vkBindTensorMemoryARM( m_device, bindInfos.size(), reinterpret_cast<const VkBindTensorMemoryInfoARM *>( bindInfos.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindTensorMemoryARM" ); + + return detail::createResultValueType( 
result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceTensorMemoryRequirementsARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceTensorMemoryRequirementsARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getTensorMemoryRequirementsARM( const DeviceTensorMemoryRequirementsARM * pInfo, + MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceTensorMemoryRequirementsARM( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDeviceTensorMemoryRequirementsARM *>( pInfo ), + reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceTensorMemoryRequirementsARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceTensorMemoryRequirementsARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE MemoryRequirements2 Device::getTensorMemoryRequirementsARM( const DeviceTensorMemoryRequirementsARM & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceTensorMemoryRequirementsARM && "Function <vkGetDeviceTensorMemoryRequirementsARM> requires <VK_ARM_tensors>" ); +# endif + + MemoryRequirements2 memoryRequirements; + d.vkGetDeviceTensorMemoryRequirementsARM( + m_device, reinterpret_cast<const VkDeviceTensorMemoryRequirementsARM *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetDeviceTensorMemoryRequirementsARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceTensorMemoryRequirementsARM.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getTensorMemoryRequirementsARM( const DeviceTensorMemoryRequirementsARM & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceTensorMemoryRequirementsARM && "Function <vkGetDeviceTensorMemoryRequirementsARM> requires <VK_ARM_tensors>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + MemoryRequirements2 & memoryRequirements = structureChain.template get<MemoryRequirements2>(); + d.vkGetDeviceTensorMemoryRequirementsARM( + m_device, reinterpret_cast<const VkDeviceTensorMemoryRequirementsARM *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyTensorARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyTensorARM( const CopyTensorInfoARM * pCopyTensorInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyTensorARM( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyTensorInfoARM *>( pCopyTensorInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyTensorARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyTensorARM( const CopyTensorInfoARM & copyTensorInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyTensorARM && "Function <vkCmdCopyTensorARM> requires <VK_ARM_tensors>" ); +# endif + + d.vkCmdCopyTensorARM( m_commandBuffer, reinterpret_cast<const VkCopyTensorInfoARM *>( &copyTensorInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPhysicalDeviceExternalTensorPropertiesARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalTensorPropertiesARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getExternalTensorPropertiesARM( const PhysicalDeviceExternalTensorInfoARM * pExternalTensorInfo, + ExternalTensorPropertiesARM * pExternalTensorProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceExternalTensorPropertiesARM( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkPhysicalDeviceExternalTensorInfoARM *>( pExternalTensorInfo ), + reinterpret_cast<VkExternalTensorPropertiesARM *>( pExternalTensorProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceExternalTensorPropertiesARM, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalTensorPropertiesARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ExternalTensorPropertiesARM PhysicalDevice::getExternalTensorPropertiesARM( + const PhysicalDeviceExternalTensorInfoARM & externalTensorInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalTensorPropertiesARM && + "Function <vkGetPhysicalDeviceExternalTensorPropertiesARM> requires <VK_ARM_tensors>" ); +# endif + + ExternalTensorPropertiesARM externalTensorProperties; + d.vkGetPhysicalDeviceExternalTensorPropertiesARM( m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceExternalTensorInfoARM *>( &externalTensorInfo ), + reinterpret_cast<VkExternalTensorPropertiesARM *>( &externalTensorProperties ) ); + + return externalTensorProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetTensorOpaqueCaptureDescriptorDataARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorOpaqueCaptureDescriptorDataARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getTensorOpaqueCaptureDescriptorDataARM( const TensorCaptureDescriptorDataInfoARM * pInfo, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetTensorOpaqueCaptureDescriptorDataARM( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkTensorCaptureDescriptorDataInfoARM *>( pInfo ), pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetTensorOpaqueCaptureDescriptorDataARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorOpaqueCaptureDescriptorDataARM.html + template <typename DataType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type + Device::getTensorOpaqueCaptureDescriptorDataARM( const TensorCaptureDescriptorDataInfoARM & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetTensorOpaqueCaptureDescriptorDataARM && "Function <vkGetTensorOpaqueCaptureDescriptorDataARM> requires <VK_ARM_tensors>" ); +# endif + + DataType data; + Result result = static_cast<Result>( + d.vkGetTensorOpaqueCaptureDescriptorDataARM( m_device, reinterpret_cast<const VkTensorCaptureDescriptorDataInfoARM *>( &info ), &data ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getTensorOpaqueCaptureDescriptorDataARM" ); + + return detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetTensorViewOpaqueCaptureDescriptorDataARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorViewOpaqueCaptureDescriptorDataARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE Result Device::getTensorViewOpaqueCaptureDescriptorDataARM( const TensorViewCaptureDescriptorDataInfoARM * pInfo, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetTensorViewOpaqueCaptureDescriptorDataARM( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkTensorViewCaptureDescriptorDataInfoARM *>( pInfo ), pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetTensorViewOpaqueCaptureDescriptorDataARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorViewOpaqueCaptureDescriptorDataARM.html + template <typename DataType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type + Device::getTensorViewOpaqueCaptureDescriptorDataARM( const TensorViewCaptureDescriptorDataInfoARM & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetTensorViewOpaqueCaptureDescriptorDataARM && + "Function <vkGetTensorViewOpaqueCaptureDescriptorDataARM> requires <VK_ARM_tensors>" ); +# endif + + DataType data; + Result result = static_cast<Result>( + d.vkGetTensorViewOpaqueCaptureDescriptorDataARM( m_device, reinterpret_cast<const VkTensorViewCaptureDescriptorDataInfoARM *>( &info ), &data ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getTensorViewOpaqueCaptureDescriptorDataARM" ); + + return detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_shader_module_identifier === + + // wrapper function for command vkGetShaderModuleIdentifierEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderModuleIdentifierEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::getShaderModuleIdentifierEXT( ShaderModule shaderModule, ShaderModuleIdentifierEXT * pIdentifier, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetShaderModuleIdentifierEXT( + static_cast<VkDevice>( m_device ), static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetShaderModuleIdentifierEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderModuleIdentifierEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ShaderModuleIdentifierEXT Device::getShaderModuleIdentifierEXT( ShaderModule shaderModule, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetShaderModuleIdentifierEXT && "Function <vkGetShaderModuleIdentifierEXT> requires <VK_EXT_shader_module_identifier>" ); +# endif + + ShaderModuleIdentifierEXT identifier; + d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) 
); + + return identifier; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetShaderModuleCreateInfoIdentifierEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderModuleCreateInfoIdentifierEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getShaderModuleCreateInfoIdentifierEXT( const ShaderModuleCreateInfo * pCreateInfo, + ShaderModuleIdentifierEXT * pIdentifier, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetShaderModuleCreateInfoIdentifierEXT( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), + reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetShaderModuleCreateInfoIdentifierEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderModuleCreateInfoIdentifierEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ShaderModuleIdentifierEXT + Device::getShaderModuleCreateInfoIdentifierEXT( const ShaderModuleCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetShaderModuleCreateInfoIdentifierEXT && + "Function <vkGetShaderModuleCreateInfoIdentifierEXT> requires <VK_EXT_shader_module_identifier>" ); +# endif + + ShaderModuleIdentifierEXT identifier; + d.vkGetShaderModuleCreateInfoIdentifierEXT( + m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) ); + + return identifier; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_optical_flow === + + // wrapper function for command vkGetPhysicalDeviceOpticalFlowImageFormatsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceOpticalFlowImageFormatsNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getOpticalFlowImageFormatsNV( const OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, + uint32_t * pFormatCount, + OpticalFlowImageFormatPropertiesNV * pImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( pOpticalFlowImageFormatInfo ), + pFormatCount, + reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( pImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceOpticalFlowImageFormatsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceOpticalFlowImageFormatsNV.html + template <typename OpticalFlowImageFormatPropertiesNVAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename OpticalFlowImageFormatPropertiesNVAllocator::value_type, 
OpticalFlowImageFormatPropertiesNV>::value, + int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::vector<OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type + PhysicalDevice::getOpticalFlowImageFormatsNV( const OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV && + "Function <vkGetPhysicalDeviceOpticalFlowImageFormatsNV> requires <VK_NV_optical_flow>" ); +# endif + + std::vector<OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties; + uint32_t formatCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( + m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && formatCount ) + { + imageFormatProperties.resize( formatCount ); + result = static_cast<Result>( + d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, + reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), + &formatCount, + reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" ); + VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() ); + if ( formatCount < imageFormatProperties.size() ) + { + imageFormatProperties.resize( formatCount ); + } + return detail::createResultValueType( result, std::move( imageFormatProperties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceOpticalFlowImageFormatsNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceOpticalFlowImageFormatsNV.html + template <typename OpticalFlowImageFormatPropertiesNVAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename OpticalFlowImageFormatPropertiesNVAllocator::value_type, OpticalFlowImageFormatPropertiesNV>::value, + int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::vector<OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type + PhysicalDevice::getOpticalFlowImageFormatsNV( const OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, + OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV && + "Function <vkGetPhysicalDeviceOpticalFlowImageFormatsNV> requires <VK_NV_optical_flow>" ); +# endif + + std::vector<OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties( + opticalFlowImageFormatPropertiesNVAllocator ); + uint32_t formatCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( + m_physicalDevice, reinterpret_cast<const 
VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && formatCount ) + { + imageFormatProperties.resize( formatCount ); + result = static_cast<Result>( + d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, + reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), + &formatCount, + reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" ); + VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() ); + if ( formatCount < imageFormatProperties.size() ) + { + imageFormatProperties.resize( formatCount ); + } + return detail::createResultValueType( result, std::move( imageFormatProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateOpticalFlowSessionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateOpticalFlowSessionNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createOpticalFlowSessionNV( const OpticalFlowSessionCreateInfoNV * pCreateInfo, + const AllocationCallbacks * pAllocator, + OpticalFlowSessionNV * pSession, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateOpticalFlowSessionNV( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkOpticalFlowSessionNV *>( pSession ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateOpticalFlowSessionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateOpticalFlowSessionNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<OpticalFlowSessionNV>::type Device::createOpticalFlowSessionNV( + const OpticalFlowSessionCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateOpticalFlowSessionNV && "Function <vkCreateOpticalFlowSessionNV> requires <VK_NV_optical_flow>" ); +# endif + + OpticalFlowSessionNV session; + Result result = static_cast<Result>( d.vkCreateOpticalFlowSessionNV( m_device, + reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNV" ); + + return detail::createResultValueType( result, std::move( session ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateOpticalFlowSessionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateOpticalFlowSessionNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<OpticalFlowSessionNV, Dispatch>>::type Device::createOpticalFlowSessionNVUnique( + const OpticalFlowSessionCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateOpticalFlowSessionNV && "Function <vkCreateOpticalFlowSessionNV> requires <VK_NV_optical_flow>" ); +# endif + + OpticalFlowSessionNV session; + Result result = static_cast<Result>( d.vkCreateOpticalFlowSessionNV( m_device, + reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNVUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<OpticalFlowSessionNV, Dispatch>( session, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyOpticalFlowSessionNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyOpticalFlowSessionNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyOpticalFlowSessionNV( OpticalFlowSessionNV session, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyOpticalFlowSessionNV( + static_cast<VkDevice>( m_device ), static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyOpticalFlowSessionNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyOpticalFlowSessionNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( OpticalFlowSessionNV session, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyOpticalFlowSessionNV && "Function <vkDestroyOpticalFlowSessionNV> requires <VK_NV_optical_flow>" ); +# endif + + d.vkDestroyOpticalFlowSessionNV( + m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyOpticalFlowSessionNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyOpticalFlowSessionNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( OpticalFlowSessionNV session, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyOpticalFlowSessionNV( + static_cast<VkDevice>( m_device ), 
static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyOpticalFlowSessionNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyOpticalFlowSessionNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( OpticalFlowSessionNV session, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyOpticalFlowSessionNV && "Function <vkDestroyOpticalFlowSessionNV> requires <VK_NV_optical_flow>" ); +# endif + + d.vkDestroyOpticalFlowSessionNV( + m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindOpticalFlowSessionImageNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindOpticalFlowSessionImageNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindOpticalFlowSessionImageNV( OpticalFlowSessionNV session, + OpticalFlowSessionBindingPointNV bindingPoint, + ImageView view, + ImageLayout layout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkBindOpticalFlowSessionImageNV( static_cast<VkDevice>( m_device ), + static_cast<VkOpticalFlowSessionNV>( session ), + static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ), + static_cast<VkImageView>( view ), + static_cast<VkImageLayout>( layout ) ) ); + } +#else + // wrapper function for command vkBindOpticalFlowSessionImageNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindOpticalFlowSessionImageNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindOpticalFlowSessionImageNV( + OpticalFlowSessionNV session, OpticalFlowSessionBindingPointNV bindingPoint, ImageView view, ImageLayout layout, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindOpticalFlowSessionImageNV && "Function <vkBindOpticalFlowSessionImageNV> requires <VK_NV_optical_flow>" ); +# endif + + Result result = static_cast<Result>( d.vkBindOpticalFlowSessionImageNV( m_device, + static_cast<VkOpticalFlowSessionNV>( session ), + static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ), + static_cast<VkImageView>( view ), + static_cast<VkImageLayout>( layout ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindOpticalFlowSessionImageNV" ); + + return detail::createResultValueType( result ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkCmdOpticalFlowExecuteNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdOpticalFlowExecuteNV.html + template <typename Dispatch, 
typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( OpticalFlowSessionNV session, + const OpticalFlowExecuteInfoNV * pExecuteInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdOpticalFlowExecuteNV( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkOpticalFlowSessionNV>( session ), + reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( pExecuteInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdOpticalFlowExecuteNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdOpticalFlowExecuteNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( OpticalFlowSessionNV session, + const OpticalFlowExecuteInfoNV & executeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdOpticalFlowExecuteNV && "Function <vkCmdOpticalFlowExecuteNV> requires <VK_NV_optical_flow>" ); +# endif + + d.vkCmdOpticalFlowExecuteNV( + m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_maintenance5 === + + // wrapper function for command vkCmdBindIndexBuffer2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindIndexBuffer2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::bindIndexBuffer2KHR( Buffer buffer, DeviceSize offset, DeviceSize size, IndexType indexType, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindIndexBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkBuffer>( buffer ), + static_cast<VkDeviceSize>( offset ), + static_cast<VkDeviceSize>( size ), + static_cast<VkIndexType>( indexType ) ); + } + + // wrapper function for command vkGetRenderingAreaGranularityKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderingAreaGranularityKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getRenderingAreaGranularityKHR( const RenderingAreaInfo * pRenderingAreaInfo, + Extent2D * pGranularity, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetRenderingAreaGranularityKHR( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkRenderingAreaInfo *>( pRenderingAreaInfo ), reinterpret_cast<VkExtent2D *>( pGranularity ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetRenderingAreaGranularityKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetRenderingAreaGranularityKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Extent2D Device::getRenderingAreaGranularityKHR( const RenderingAreaInfo & renderingAreaInfo, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRenderingAreaGranularityKHR && "Function <vkGetRenderingAreaGranularityKHR> requires <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); +# endif + + Extent2D granularity; + d.vkGetRenderingAreaGranularityKHR( + m_device, reinterpret_cast<const VkRenderingAreaInfo *>( &renderingAreaInfo ), reinterpret_cast<VkExtent2D *>( &granularity ) ); + + return granularity; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDeviceImageSubresourceLayoutKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayoutKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::getImageSubresourceLayoutKHR( const DeviceImageSubresourceInfo * pInfo, SubresourceLayout2 * pLayout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceImageSubresourceLayoutKHR( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceImageSubresourceInfo *>( pInfo ), reinterpret_cast<VkSubresourceLayout2 *>( pLayout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDeviceImageSubresourceLayoutKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayoutKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE SubresourceLayout2 Device::getImageSubresourceLayoutKHR( const DeviceImageSubresourceInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayoutKHR && + "Function <vkGetDeviceImageSubresourceLayoutKHR> requires <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); +# endif + + SubresourceLayout2 layout; + d.vkGetDeviceImageSubresourceLayoutKHR( + m_device, reinterpret_cast<const VkDeviceImageSubresourceInfo *>( &info ), reinterpret_cast<VkSubresourceLayout2 *>( &layout ) ); + + return layout; + } + + // wrapper function for command vkGetDeviceImageSubresourceLayoutKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSubresourceLayoutKHR.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageSubresourceLayoutKHR( const DeviceImageSubresourceInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayoutKHR && + "Function <vkGetDeviceImageSubresourceLayoutKHR> requires <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + SubresourceLayout2 & layout = structureChain.template get<SubresourceLayout2>(); + d.vkGetDeviceImageSubresourceLayoutKHR( + m_device, reinterpret_cast<const VkDeviceImageSubresourceInfo *>( &info ), reinterpret_cast<VkSubresourceLayout2 *>( &layout ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetImageSubresourceLayout2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2KHR( Image image, + const ImageSubresource2 * pSubresource, + SubresourceLayout2 * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSubresourceLayout2KHR( static_cast<VkDevice>( m_device ), + static_cast<VkImage>( image ), + reinterpret_cast<const VkImageSubresource2 *>( pSubresource ), + reinterpret_cast<VkSubresourceLayout2 *>( pLayout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetImageSubresourceLayout2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE SubresourceLayout2 Device::getImageSubresourceLayout2KHR( Image image, + const ImageSubresource2 & subresource, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkGetImageSubresourceLayout2KHR && + "Function <vkGetImageSubresourceLayout2KHR> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); +# endif + + SubresourceLayout2 layout; + d.vkGetImageSubresourceLayout2KHR( m_device, + static_cast<VkImage>( image ), + reinterpret_cast<const VkImageSubresource2 *>( &subresource ), + reinterpret_cast<VkSubresourceLayout2 *>( &layout ) ); + + return layout; + } + + // wrapper function for command vkGetImageSubresourceLayout2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSubresourceLayout2KHR.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> + Device::getImageSubresourceLayout2KHR( Image image, const ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkGetImageSubresourceLayout2KHR && + "Function <vkGetImageSubresourceLayout2KHR> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5> or <VK_VERSION_1_4>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + SubresourceLayout2 & layout = structureChain.template get<SubresourceLayout2>(); + d.vkGetImageSubresourceLayout2KHR( m_device, + static_cast<VkImage>( image ), + reinterpret_cast<const VkImageSubresource2 *>( &subresource ), + reinterpret_cast<VkSubresourceLayout2 *>( &layout ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_AMD_anti_lag === + + // wrapper function for command vkAntiLagUpdateAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAntiLagUpdateAMD.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::antiLagUpdateAMD( const AntiLagDataAMD * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkAntiLagUpdateAMD( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAntiLagDataAMD *>( pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAntiLagUpdateAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAntiLagUpdateAMD.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::antiLagUpdateAMD( const AntiLagDataAMD & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAntiLagUpdateAMD && "Function <vkAntiLagUpdateAMD> requires <VK_AMD_anti_lag>" ); +# endif + + d.vkAntiLagUpdateAMD( m_device, reinterpret_cast<const VkAntiLagDataAMD *>( &data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_present_wait2 === + + // wrapper function for command vkWaitForPresent2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForPresent2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForPresent2KHR( SwapchainKHR swapchain, + const PresentWait2InfoKHR * pPresentWait2Info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkWaitForPresent2KHR( + static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkPresentWait2InfoKHR *>( pPresentWait2Info ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkWaitForPresent2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForPresent2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForPresent2KHR( SwapchainKHR swapchain, + const PresentWait2InfoKHR & presentWait2Info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWaitForPresent2KHR && "Function <vkWaitForPresent2KHR> requires <VK_KHR_present_wait2>" ); +# endif + + Result result = static_cast<Result>( + d.vkWaitForPresent2KHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkPresentWait2InfoKHR *>( &presentWait2Info ) ) ); + +# if defined( VULKAN_HPP_HANDLE_ERROR_OUT_OF_DATE_AS_SUCCESS ) + detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresent2KHR", + { Result::eSuccess, Result::eTimeout, Result::eSuboptimalKHR, Result::eErrorOutOfDateKHR } ); +# else + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresent2KHR", { Result::eSuccess, Result::eTimeout, Result::eSuboptimalKHR } ); +# endif + + return static_cast<Result>( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_shader_object === + + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShadersEXT( uint32_t createInfoCount, + const ShaderCreateInfoEXT * pCreateInfos, + const AllocationCallbacks * pAllocator, + ShaderEXT * pShaders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateShadersEXT( static_cast<VkDevice>( m_device ), + createInfoCount, + reinterpret_cast<const VkShaderCreateInfoEXT *>( pCreateInfos ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkShaderEXT *>( pShaders ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + template <typename ShaderEXTAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename ShaderEXTAllocator::value_type, ShaderEXT>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<ShaderEXT, ShaderEXTAllocator>> Device::createShadersEXT( + ArrayProxy<const ShaderCreateInfoEXT> const & createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" ); +# endif + + std::vector<ShaderEXT, ShaderEXTAllocator> shaders( createInfos.size() ); + Result result = static_cast<Result>( d.vkCreateShadersEXT( m_device, + createInfos.size(), + reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT", { Result::eSuccess, Result::eIncompatibleShaderBinaryEXT } ); + + return 
ResultValue<std::vector<ShaderEXT, ShaderEXTAllocator>>( result, std::move( shaders ) ); + } + + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + template <typename ShaderEXTAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename ShaderEXTAllocator::value_type, ShaderEXT>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<ShaderEXT, ShaderEXTAllocator>> + Device::createShadersEXT( ArrayProxy<const ShaderCreateInfoEXT> const & createInfos, + Optional<const AllocationCallbacks> allocator, + ShaderEXTAllocator & shaderEXTAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" ); +# endif + + std::vector<ShaderEXT, ShaderEXTAllocator> shaders( createInfos.size(), shaderEXTAllocator ); + Result result = static_cast<Result>( d.vkCreateShadersEXT( m_device, + createInfos.size(), + reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT", { Result::eSuccess, Result::eIncompatibleShaderBinaryEXT } ); + + return ResultValue<std::vector<ShaderEXT, ShaderEXTAllocator>>( result, std::move( shaders ) ); + } + + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<ShaderEXT> + Device::createShaderEXT( const ShaderCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" ); +# endif + + ShaderEXT shader; + Result result = static_cast<Result>( d.vkCreateShadersEXT( m_device, + 1, + reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkShaderEXT *>( &shader ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXT", { Result::eSuccess, Result::eIncompatibleShaderBinaryEXT } ); + + return ResultValue<ShaderEXT>( result, std::move( shader ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + template <typename Dispatch, + typename ShaderEXTAllocator, + typename std::enable_if<std::is_same<typename ShaderEXTAllocator::value_type, UniqueHandle<ShaderEXT, Dispatch>>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<ShaderEXT, Dispatch>, ShaderEXTAllocator>> Device::createShadersEXTUnique( + ArrayProxy<const ShaderCreateInfoEXT> const & 
createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" ); +# endif + + std::vector<ShaderEXT> shaders( createInfos.size() ); + Result result = static_cast<Result>( d.vkCreateShadersEXT( m_device, + createInfos.size(), + reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique", { Result::eSuccess, Result::eIncompatibleShaderBinaryEXT } ); + std::vector<UniqueHandle<ShaderEXT, Dispatch>, ShaderEXTAllocator> uniqueShaders; + uniqueShaders.reserve( createInfos.size() ); + detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); + for ( auto const & shader : shaders ) + { + uniqueShaders.push_back( UniqueHandle<ShaderEXT, Dispatch>( shader, deleter ) ); + } + return ResultValue<std::vector<UniqueHandle<ShaderEXT, Dispatch>, ShaderEXTAllocator>>( result, std::move( uniqueShaders ) ); + } + + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + template <typename Dispatch, + typename ShaderEXTAllocator, + typename std::enable_if<std::is_same<typename ShaderEXTAllocator::value_type, UniqueHandle<ShaderEXT, Dispatch>>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<ShaderEXT, Dispatch>, ShaderEXTAllocator>> + Device::createShadersEXTUnique( ArrayProxy<const ShaderCreateInfoEXT> const & createInfos, + Optional<const AllocationCallbacks> allocator, + ShaderEXTAllocator & shaderEXTAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" ); +# endif + + std::vector<ShaderEXT> shaders( createInfos.size() ); + Result result = static_cast<Result>( d.vkCreateShadersEXT( m_device, + createInfos.size(), + reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique", { Result::eSuccess, Result::eIncompatibleShaderBinaryEXT } ); + std::vector<UniqueHandle<ShaderEXT, Dispatch>, ShaderEXTAllocator> uniqueShaders( shaderEXTAllocator ); + uniqueShaders.reserve( createInfos.size() ); + detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); + for ( auto const & shader : shaders ) + { + uniqueShaders.push_back( UniqueHandle<ShaderEXT, Dispatch>( shader, deleter ) ); + } + return ResultValue<std::vector<UniqueHandle<ShaderEXT, Dispatch>, ShaderEXTAllocator>>( result, std::move( uniqueShaders ) ); + } + + // wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, 
bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<ShaderEXT, Dispatch>> + Device::createShaderEXTUnique( const ShaderCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function <vkCreateShadersEXT> requires <VK_EXT_shader_object>" ); +# endif + + ShaderEXT shader; + Result result = static_cast<Result>( d.vkCreateShadersEXT( m_device, + 1, + reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkShaderEXT *>( &shader ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXTUnique", { Result::eSuccess, Result::eIncompatibleShaderBinaryEXT } ); + + return ResultValue<UniqueHandle<ShaderEXT, Dispatch>>( + result, UniqueHandle<ShaderEXT, Dispatch>( shader, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyShaderEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyShaderEXT( ShaderEXT shader, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyShaderEXT( + static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyShaderEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyShaderEXT( ShaderEXT shader, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyShaderEXT && "Function <vkDestroyShaderEXT> requires <VK_EXT_shader_object>" ); +# endif + + d.vkDestroyShaderEXT( m_device, static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyShaderEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( ShaderEXT shader, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyShaderEXT( + static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyShaderEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyShaderEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( ShaderEXT shader, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyShaderEXT && "Function <vkDestroyShaderEXT> requires <VK_EXT_shader_object>" ); +# endif + + d.vkDestroyShaderEXT( m_device, static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetShaderBinaryDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderBinaryDataEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getShaderBinaryDataEXT( ShaderEXT shader, size_t * pDataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetShaderBinaryDataEXT( static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( shader ), pDataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetShaderBinaryDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderBinaryDataEXT.html + template <typename Uint8_tAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type + Device::getShaderBinaryDataEXT( ShaderEXT shader, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetShaderBinaryDataEXT && "Function <vkGetShaderBinaryDataEXT> requires <VK_EXT_shader_object>" ); +# endif + + std::vector<uint8_t, Uint8_tAllocator> data; + size_t dataSize; + Result result; + do + { + result = static_cast<Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = + static_cast<Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, reinterpret_cast<void *>( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return detail::createResultValueType( result, std::move( data ) ); + } + + // wrapper function for command vkGetShaderBinaryDataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderBinaryDataEXT.html + template <typename Uint8_tAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> 
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type + Device::getShaderBinaryDataEXT( ShaderEXT shader, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetShaderBinaryDataEXT && "Function <vkGetShaderBinaryDataEXT> requires <VK_EXT_shader_object>" ); +# endif + + std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator ); + size_t dataSize; + Result result; + do + { + result = static_cast<Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = + static_cast<Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, reinterpret_cast<void *>( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindShadersEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( uint32_t stageCount, + const ShaderStageFlagBits * pStages, + const ShaderEXT * pShaders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindShadersEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + stageCount, + reinterpret_cast<const VkShaderStageFlagBits *>( pStages ), + reinterpret_cast<const VkShaderEXT *>( pShaders ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindShadersEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( ArrayProxy<const ShaderStageFlagBits> const & stages, + ArrayProxy<const ShaderEXT> const & shaders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindShadersEXT && "Function <vkCmdBindShadersEXT> requires <VK_EXT_shader_object>" ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( stages.size() == shaders.size() ); +# else + if ( stages.size() != shaders.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindShadersEXT: stages.size() != shaders.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindShadersEXT( m_commandBuffer, + stages.size(), + reinterpret_cast<const VkShaderStageFlagBits *>( stages.data() ), + reinterpret_cast<const VkShaderEXT *>( shaders.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetDepthClampRangeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClampRangeEXT.html 
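+
+  // Illustrative usage sketch for the VK_EXT_shader_object wrappers defined above (editorial
+  // comment, not emitted by the registry generator). It assumes an already-created Device
+  // `device`, a CommandBuffer `cmd` in the recording state, and SPIR-V code held in a
+  // std::vector<uint32_t> `spirv`; these identifiers are illustrative assumptions only.
+  //
+  //   vk::ShaderCreateInfoEXT createInfo{};
+  //   createInfo.stage    = vk::ShaderStageFlagBits::eVertex;
+  //   createInfo.codeType = vk::ShaderCodeTypeEXT::eSpirv;
+  //   createInfo.codeSize = spirv.size() * sizeof( uint32_t );
+  //   createInfo.pCode    = spirv.data();
+  //   createInfo.pName    = "main";
+  //
+  //   // createShaderEXT returns a ResultValue, since eIncompatibleShaderBinaryEXT is also a success code
+  //   vk::ResultValue<vk::ShaderEXT> created = device.createShaderEXT( createInfo );
+  //   vk::ShaderEXT                  shader  = created.value;
+  //
+  //   cmd.bindShadersEXT( vk::ShaderStageFlagBits::eVertex, shader );  // ArrayProxy also accepts single elements
+  //
+  //   device.destroyShaderEXT( shader );  // only after all submitted work using the shader has completed
+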
+ template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDepthClampRangeEXT( DepthClampModeEXT depthClampMode, + const DepthClampRangeEXT * pDepthClampRange, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthClampRangeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkDepthClampModeEXT>( depthClampMode ), + reinterpret_cast<const VkDepthClampRangeEXT *>( pDepthClampRange ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetDepthClampRangeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthClampRangeEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDepthClampRangeEXT( DepthClampModeEXT depthClampMode, + Optional<const DepthClampRangeEXT> depthClampRange, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetDepthClampRangeEXT && + "Function <vkCmdSetDepthClampRangeEXT> requires <VK_EXT_depth_clamp_control> or <VK_EXT_shader_object>" ); +# endif + + d.vkCmdSetDepthClampRangeEXT( + m_commandBuffer, static_cast<VkDepthClampModeEXT>( depthClampMode ), reinterpret_cast<const VkDepthClampRangeEXT *>( depthClampRange.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_pipeline_binary === + + // wrapper function for command vkCreatePipelineBinariesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineBinariesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineBinariesKHR( const PipelineBinaryCreateInfoKHR * pCreateInfo, + const AllocationCallbacks * pAllocator, + PipelineBinaryHandlesInfoKHR * pBinaries, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreatePipelineBinariesKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( pBinaries ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreatePipelineBinariesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineBinariesKHR.html + template <typename PipelineBinaryKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PipelineBinaryKHRAllocator::value_type, PipelineBinaryKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<PipelineBinaryKHR, PipelineBinaryKHRAllocator>> + Device::createPipelineBinariesKHR( const PipelineBinaryCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function <vkCreatePipelineBinariesKHR> 
requires <VK_KHR_pipeline_binary>" ); +# endif + + std::vector<PipelineBinaryKHR, PipelineBinaryKHRAllocator> pipelineBinaries; + PipelineBinaryHandlesInfoKHR binaries; + Result result; + if ( createInfo.pKeysAndDataInfo ) + { + VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); + pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); + binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast<Result>( d.vkCreatePipelineBinariesKHR( m_device, + reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); + } + else + { + VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); + result = static_cast<Result>( d.vkCreatePipelineBinariesKHR( m_device, + reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); + if ( result == Result::eSuccess ) + { + pipelineBinaries.resize( binaries.pipelineBinaryCount ); + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast<Result>( d.vkCreatePipelineBinariesKHR( m_device, + reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); + } + } + + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHR", { Result::eSuccess, Result::eIncomplete, Result::ePipelineBinaryMissingKHR } ); + + return ResultValue<std::vector<PipelineBinaryKHR, PipelineBinaryKHRAllocator>>( result, std::move( pipelineBinaries ) ); + } + + // wrapper function for command vkCreatePipelineBinariesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineBinariesKHR.html + template <typename PipelineBinaryKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PipelineBinaryKHRAllocator::value_type, PipelineBinaryKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<PipelineBinaryKHR, PipelineBinaryKHRAllocator>> + Device::createPipelineBinariesKHR( const PipelineBinaryCreateInfoKHR & createInfo, + Optional<const AllocationCallbacks> allocator, + PipelineBinaryKHRAllocator & pipelineBinaryKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function <vkCreatePipelineBinariesKHR> requires <VK_KHR_pipeline_binary>" ); +# endif + + std::vector<PipelineBinaryKHR, PipelineBinaryKHRAllocator> pipelineBinaries( pipelineBinaryKHRAllocator ); + PipelineBinaryHandlesInfoKHR binaries; + Result result; + if ( createInfo.pKeysAndDataInfo ) + { + VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); + pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); + binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast<Result>( d.vkCreatePipelineBinariesKHR( m_device, + 
reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); + } + else + { + VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); + result = static_cast<Result>( d.vkCreatePipelineBinariesKHR( m_device, + reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); + if ( result == Result::eSuccess ) + { + pipelineBinaries.resize( binaries.pipelineBinaryCount ); + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast<Result>( d.vkCreatePipelineBinariesKHR( m_device, + reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); + } + } + + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHR", { Result::eSuccess, Result::eIncomplete, Result::ePipelineBinaryMissingKHR } ); + + return ResultValue<std::vector<PipelineBinaryKHR, PipelineBinaryKHRAllocator>>( result, std::move( pipelineBinaries ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreatePipelineBinariesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineBinariesKHR.html + template <typename Dispatch, + typename PipelineBinaryKHRAllocator, + typename std::enable_if<std::is_same<typename PipelineBinaryKHRAllocator::value_type, UniqueHandle<PipelineBinaryKHR, Dispatch>>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<PipelineBinaryKHR, Dispatch>, PipelineBinaryKHRAllocator>> + Device::createPipelineBinariesKHRUnique( const PipelineBinaryCreateInfoKHR & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function <vkCreatePipelineBinariesKHR> requires <VK_KHR_pipeline_binary>" ); +# endif + + std::vector<PipelineBinaryKHR> pipelineBinaries; + PipelineBinaryHandlesInfoKHR binaries; + Result result; + if ( createInfo.pKeysAndDataInfo ) + { + VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); + pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); + binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast<Result>( d.vkCreatePipelineBinariesKHR( m_device, + reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); + } + else + { + VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); + result = static_cast<Result>( d.vkCreatePipelineBinariesKHR( m_device, + reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); + if ( result == 
Result::eSuccess ) + { + pipelineBinaries.resize( binaries.pipelineBinaryCount ); + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast<Result>( d.vkCreatePipelineBinariesKHR( m_device, + reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); + } + } + + detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHRUnique", + { Result::eSuccess, Result::eIncomplete, Result::ePipelineBinaryMissingKHR } ); + std::vector<UniqueHandle<PipelineBinaryKHR, Dispatch>, PipelineBinaryKHRAllocator> uniquePipelineBinaries; + uniquePipelineBinaries.reserve( pipelineBinaries.size() ); + detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); + for ( auto const & pipelineBinary : pipelineBinaries ) + { + uniquePipelineBinaries.push_back( UniqueHandle<PipelineBinaryKHR, Dispatch>( pipelineBinary, deleter ) ); + } + return ResultValue<std::vector<UniqueHandle<PipelineBinaryKHR, Dispatch>, PipelineBinaryKHRAllocator>>( result, std::move( uniquePipelineBinaries ) ); + } + + // wrapper function for command vkCreatePipelineBinariesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePipelineBinariesKHR.html + template <typename Dispatch, + typename PipelineBinaryKHRAllocator, + typename std::enable_if<std::is_same<typename PipelineBinaryKHRAllocator::value_type, UniqueHandle<PipelineBinaryKHR, Dispatch>>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<PipelineBinaryKHR, Dispatch>, PipelineBinaryKHRAllocator>> + Device::createPipelineBinariesKHRUnique( const PipelineBinaryCreateInfoKHR & createInfo, + Optional<const AllocationCallbacks> allocator, + PipelineBinaryKHRAllocator & pipelineBinaryKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function <vkCreatePipelineBinariesKHR> requires <VK_KHR_pipeline_binary>" ); +# endif + + std::vector<PipelineBinaryKHR> pipelineBinaries; + PipelineBinaryHandlesInfoKHR binaries; + Result result; + if ( createInfo.pKeysAndDataInfo ) + { + VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); + pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); + binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast<Result>( d.vkCreatePipelineBinariesKHR( m_device, + reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); + } + else + { + VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); + result = static_cast<Result>( d.vkCreatePipelineBinariesKHR( m_device, + reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); + if ( result == Result::eSuccess ) + { + pipelineBinaries.resize( binaries.pipelineBinaryCount ); + binaries.pPipelineBinaries = pipelineBinaries.data(); + 
result = static_cast<Result>( d.vkCreatePipelineBinariesKHR( m_device, + reinterpret_cast<const VkPipelineBinaryCreateInfoKHR *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipelineBinaryHandlesInfoKHR *>( &binaries ) ) ); + } + } + + detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHRUnique", + { Result::eSuccess, Result::eIncomplete, Result::ePipelineBinaryMissingKHR } ); + std::vector<UniqueHandle<PipelineBinaryKHR, Dispatch>, PipelineBinaryKHRAllocator> uniquePipelineBinaries( pipelineBinaryKHRAllocator ); + uniquePipelineBinaries.reserve( pipelineBinaries.size() ); + detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); + for ( auto const & pipelineBinary : pipelineBinaries ) + { + uniquePipelineBinaries.push_back( UniqueHandle<PipelineBinaryKHR, Dispatch>( pipelineBinary, deleter ) ); + } + return ResultValue<std::vector<UniqueHandle<PipelineBinaryKHR, Dispatch>, PipelineBinaryKHRAllocator>>( result, std::move( uniquePipelineBinaries ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipelineBinaryKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineBinaryKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroyPipelineBinaryKHR( PipelineBinaryKHR pipelineBinary, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipelineBinaryKHR( + static_cast<VkDevice>( m_device ), static_cast<VkPipelineBinaryKHR>( pipelineBinary ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipelineBinaryKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineBinaryKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyPipelineBinaryKHR( PipelineBinaryKHR pipelineBinary, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipelineBinaryKHR && "Function <vkDestroyPipelineBinaryKHR> requires <VK_KHR_pipeline_binary>" ); +# endif + + d.vkDestroyPipelineBinaryKHR( + m_device, static_cast<VkPipelineBinaryKHR>( pipelineBinary ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyPipelineBinaryKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineBinaryKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( PipelineBinaryKHR pipelineBinary, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipelineBinaryKHR( + static_cast<VkDevice>( m_device ), static_cast<VkPipelineBinaryKHR>( pipelineBinary ), reinterpret_cast<const VkAllocationCallbacks *>( 
pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyPipelineBinaryKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineBinaryKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( PipelineBinaryKHR pipelineBinary, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipelineBinaryKHR && "Function <vkDestroyPipelineBinaryKHR> requires <VK_KHR_pipeline_binary>" ); +# endif + + d.vkDestroyPipelineBinaryKHR( + m_device, static_cast<VkPipelineBinaryKHR>( pipelineBinary ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPipelineKeyKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineKeyKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineKeyKHR( const PipelineCreateInfoKHR * pPipelineCreateInfo, + PipelineBinaryKeyKHR * pPipelineKey, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPipelineKeyKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkPipelineCreateInfoKHR *>( pPipelineCreateInfo ), + reinterpret_cast<VkPipelineBinaryKeyKHR *>( pPipelineKey ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineKeyKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineKeyKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<PipelineBinaryKeyKHR>::type + Device::getPipelineKeyKHR( Optional<const PipelineCreateInfoKHR> pipelineCreateInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineKeyKHR && "Function <vkGetPipelineKeyKHR> requires <VK_KHR_pipeline_binary>" ); +# endif + + PipelineBinaryKeyKHR pipelineKey; + Result result = static_cast<Result>( d.vkGetPipelineKeyKHR( + m_device, reinterpret_cast<const VkPipelineCreateInfoKHR *>( pipelineCreateInfo.get() ), reinterpret_cast<VkPipelineBinaryKeyKHR *>( &pipelineKey ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineKeyKHR" ); + + return detail::createResultValueType( result, std::move( pipelineKey ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetPipelineBinaryDataKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineBinaryDataKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineBinaryDataKHR( const PipelineBinaryDataInfoKHR * pInfo, + PipelineBinaryKeyKHR * pPipelineBinaryKey, + size_t * pPipelineBinaryDataSize, + void * pPipelineBinaryData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPipelineBinaryDataKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkPipelineBinaryDataInfoKHR *>( pInfo ), + reinterpret_cast<VkPipelineBinaryKeyKHR *>( pPipelineBinaryKey ), + pPipelineBinaryDataSize, + pPipelineBinaryData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPipelineBinaryDataKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineBinaryDataKHR.html + template <typename Uint8_tAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<PipelineBinaryKeyKHR, std::vector<uint8_t, Uint8_tAllocator>>>::type + Device::getPipelineBinaryDataKHR( const PipelineBinaryDataInfoKHR & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineBinaryDataKHR && "Function <vkGetPipelineBinaryDataKHR> requires <VK_KHR_pipeline_binary>" ); +# endif + + std::pair<PipelineBinaryKeyKHR, std::vector<uint8_t, Uint8_tAllocator>> data_; + PipelineBinaryKeyKHR & pipelineBinaryKey = data_.first; + std::vector<uint8_t, Uint8_tAllocator> & pipelineBinaryData = data_.second; + size_t pipelineBinaryDataSize; + Result result = static_cast<Result>( d.vkGetPipelineBinaryDataKHR( m_device, + reinterpret_cast<const VkPipelineBinaryDataInfoKHR *>( &info ), + reinterpret_cast<VkPipelineBinaryKeyKHR *>( &pipelineBinaryKey ), + &pipelineBinaryDataSize, + nullptr ) ); + if ( result == Result::eSuccess ) + { + pipelineBinaryData.resize( pipelineBinaryDataSize ); + result = static_cast<Result>( d.vkGetPipelineBinaryDataKHR( m_device, + reinterpret_cast<const VkPipelineBinaryDataInfoKHR *>( &info ), + reinterpret_cast<VkPipelineBinaryKeyKHR *>( &pipelineBinaryKey ), + &pipelineBinaryDataSize, + reinterpret_cast<void *>( pipelineBinaryData.data() ) ) ); + } + + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineBinaryDataKHR" ); + + return detail::createResultValueType( result, std::move( data_ ) ); + } + + // wrapper function for command vkGetPipelineBinaryDataKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineBinaryDataKHR.html + template <typename Uint8_tAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<PipelineBinaryKeyKHR, std::vector<uint8_t, Uint8_tAllocator>>>::type + Device::getPipelineBinaryDataKHR( const PipelineBinaryDataInfoKHR & info, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineBinaryDataKHR && "Function <vkGetPipelineBinaryDataKHR> requires <VK_KHR_pipeline_binary>" ); +# endif + + std::pair<PipelineBinaryKeyKHR, std::vector<uint8_t, Uint8_tAllocator>> data_( + std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) ); + PipelineBinaryKeyKHR & 
pipelineBinaryKey = data_.first; + std::vector<uint8_t, Uint8_tAllocator> & pipelineBinaryData = data_.second; + size_t pipelineBinaryDataSize; + Result result = static_cast<Result>( d.vkGetPipelineBinaryDataKHR( m_device, + reinterpret_cast<const VkPipelineBinaryDataInfoKHR *>( &info ), + reinterpret_cast<VkPipelineBinaryKeyKHR *>( &pipelineBinaryKey ), + &pipelineBinaryDataSize, + nullptr ) ); + if ( result == Result::eSuccess ) + { + pipelineBinaryData.resize( pipelineBinaryDataSize ); + result = static_cast<Result>( d.vkGetPipelineBinaryDataKHR( m_device, + reinterpret_cast<const VkPipelineBinaryDataInfoKHR *>( &info ), + reinterpret_cast<VkPipelineBinaryKeyKHR *>( &pipelineBinaryKey ), + &pipelineBinaryDataSize, + reinterpret_cast<void *>( pipelineBinaryData.data() ) ) ); + } + + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineBinaryDataKHR" ); + + return detail::createResultValueType( result, std::move( data_ ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkReleaseCapturedPipelineDataKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseCapturedPipelineDataKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseCapturedPipelineDataKHR( const ReleaseCapturedPipelineDataInfoKHR * pInfo, + const AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkReleaseCapturedPipelineDataKHR( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkReleaseCapturedPipelineDataInfoKHR *>( pInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkReleaseCapturedPipelineDataKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseCapturedPipelineDataKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::releaseCapturedPipelineDataKHR( + const ReleaseCapturedPipelineDataInfoKHR & info, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkReleaseCapturedPipelineDataKHR && "Function <vkReleaseCapturedPipelineDataKHR> requires <VK_KHR_pipeline_binary>" ); +# endif + + Result result = static_cast<Result>( d.vkReleaseCapturedPipelineDataKHR( + m_device, reinterpret_cast<const VkReleaseCapturedPipelineDataInfoKHR *>( &info ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseCapturedPipelineDataKHR" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_QCOM_tile_properties === + + // wrapper function for command vkGetFramebufferTilePropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFramebufferTilePropertiesQCOM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::getFramebufferTilePropertiesQCOM( Framebuffer framebuffer, + uint32_t * pPropertiesCount, + TilePropertiesQCOM * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetFramebufferTilePropertiesQCOM( static_cast<VkDevice>( m_device ), + static_cast<VkFramebuffer>( framebuffer ), + pPropertiesCount, + reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetFramebufferTilePropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFramebufferTilePropertiesQCOM.html + template <typename TilePropertiesQCOMAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename TilePropertiesQCOMAllocator::value_type, TilePropertiesQCOM>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<TilePropertiesQCOM, TilePropertiesQCOMAllocator>>::type + Device::getFramebufferTilePropertiesQCOM( Framebuffer framebuffer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetFramebufferTilePropertiesQCOM && "Function <vkGetFramebufferTilePropertiesQCOM> requires <VK_QCOM_tile_properties>" ); +# endif + + std::vector<TilePropertiesQCOM, TilePropertiesQCOMAllocator> properties; + uint32_t propertiesCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertiesCount ) + { + properties.resize( propertiesCount ); + result = static_cast<Result>( d.vkGetFramebufferTilePropertiesQCOM( + m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFramebufferTilePropertiesQCOM" ); + VULKAN_HPP_ASSERT( propertiesCount <= properties.size() ); + if ( propertiesCount < properties.size() ) + { + properties.resize( propertiesCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetFramebufferTilePropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetFramebufferTilePropertiesQCOM.html + template <typename TilePropertiesQCOMAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename TilePropertiesQCOMAllocator::value_type, TilePropertiesQCOM>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<TilePropertiesQCOM, TilePropertiesQCOMAllocator>>::type + Device::getFramebufferTilePropertiesQCOM( Framebuffer framebuffer, TilePropertiesQCOMAllocator & tilePropertiesQCOMAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetFramebufferTilePropertiesQCOM && "Function <vkGetFramebufferTilePropertiesQCOM> requires <VK_QCOM_tile_properties>" ); +# endif + + std::vector<TilePropertiesQCOM, 
TilePropertiesQCOMAllocator> properties( tilePropertiesQCOMAllocator ); + uint32_t propertiesCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertiesCount ) + { + properties.resize( propertiesCount ); + result = static_cast<Result>( d.vkGetFramebufferTilePropertiesQCOM( + m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFramebufferTilePropertiesQCOM" ); + VULKAN_HPP_ASSERT( propertiesCount <= properties.size() ); + if ( propertiesCount < properties.size() ) + { + properties.resize( propertiesCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDynamicRenderingTilePropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDynamicRenderingTilePropertiesQCOM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDynamicRenderingTilePropertiesQCOM( const RenderingInfo * pRenderingInfo, + TilePropertiesQCOM * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetDynamicRenderingTilePropertiesQCOM( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ), + reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDynamicRenderingTilePropertiesQCOM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDynamicRenderingTilePropertiesQCOM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<TilePropertiesQCOM>::type + Device::getDynamicRenderingTilePropertiesQCOM( const RenderingInfo & renderingInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDynamicRenderingTilePropertiesQCOM && "Function <vkGetDynamicRenderingTilePropertiesQCOM> requires <VK_QCOM_tile_properties>" ); +# endif + + TilePropertiesQCOM properties; + Result result = static_cast<Result>( d.vkGetDynamicRenderingTilePropertiesQCOM( + m_device, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( &properties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getDynamicRenderingTilePropertiesQCOM" ); + + return detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_swapchain_maintenance1 === + + // wrapper function for command vkReleaseSwapchainImagesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseSwapchainImagesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::releaseSwapchainImagesKHR( const ReleaseSwapchainImagesInfoKHR * pReleaseInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkReleaseSwapchainImagesKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkReleaseSwapchainImagesInfoKHR *>( pReleaseInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkReleaseSwapchainImagesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkReleaseSwapchainImagesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::releaseSwapchainImagesKHR( const ReleaseSwapchainImagesInfoKHR & releaseInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkReleaseSwapchainImagesKHR && + "Function <vkReleaseSwapchainImagesKHR> requires <VK_EXT_swapchain_maintenance1> or <VK_KHR_swapchain_maintenance1>" ); +# endif + + Result result = static_cast<Result>( d.vkReleaseSwapchainImagesKHR( m_device, reinterpret_cast<const VkReleaseSwapchainImagesInfoKHR *>( &releaseInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesKHR" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_cooperative_vector === + + // wrapper function for command vkGetPhysicalDeviceCooperativeVectorPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeVectorPropertiesNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeVectorPropertiesNV( uint32_t * pPropertyCount, + CooperativeVectorPropertiesNV * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeVectorPropertiesNV( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkCooperativeVectorPropertiesNV *>( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceCooperativeVectorPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeVectorPropertiesNV.html + template <typename CooperativeVectorPropertiesNVAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename CooperativeVectorPropertiesNVAllocator::value_type, CooperativeVectorPropertiesNV>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<CooperativeVectorPropertiesNV, CooperativeVectorPropertiesNVAllocator>>::type + PhysicalDevice::getCooperativeVectorPropertiesNV( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeVectorPropertiesNV && + "Function <vkGetPhysicalDeviceCooperativeVectorPropertiesNV> requires 
<VK_NV_cooperative_vector>" ); +# endif + + std::vector<CooperativeVectorPropertiesNV, CooperativeVectorPropertiesNVAllocator> properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeVectorPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeVectorPropertiesNV( + m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeVectorPropertiesNV *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeVectorPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceCooperativeVectorPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeVectorPropertiesNV.html + template <typename CooperativeVectorPropertiesNVAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename CooperativeVectorPropertiesNVAllocator::value_type, CooperativeVectorPropertiesNV>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<CooperativeVectorPropertiesNV, CooperativeVectorPropertiesNVAllocator>>::type + PhysicalDevice::getCooperativeVectorPropertiesNV( CooperativeVectorPropertiesNVAllocator & cooperativeVectorPropertiesNVAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeVectorPropertiesNV && + "Function <vkGetPhysicalDeviceCooperativeVectorPropertiesNV> requires <VK_NV_cooperative_vector>" ); +# endif + + std::vector<CooperativeVectorPropertiesNV, CooperativeVectorPropertiesNVAllocator> properties( cooperativeVectorPropertiesNVAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeVectorPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeVectorPropertiesNV( + m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeVectorPropertiesNV *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeVectorPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkConvertCooperativeVectorMatrixNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkConvertCooperativeVectorMatrixNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
Result Device::convertCooperativeVectorMatrixNV( const ConvertCooperativeVectorMatrixInfoNV * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkConvertCooperativeVectorMatrixNV( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkConvertCooperativeVectorMatrixInfoNV *>( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkConvertCooperativeVectorMatrixNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkConvertCooperativeVectorMatrixNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::convertCooperativeVectorMatrixNV( const ConvertCooperativeVectorMatrixInfoNV & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkConvertCooperativeVectorMatrixNV && "Function <vkConvertCooperativeVectorMatrixNV> requires <VK_NV_cooperative_vector>" ); +# endif + + Result result = + static_cast<Result>( d.vkConvertCooperativeVectorMatrixNV( m_device, reinterpret_cast<const VkConvertCooperativeVectorMatrixInfoNV *>( &info ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::convertCooperativeVectorMatrixNV", { Result::eSuccess, Result::eIncomplete } ); + + return static_cast<Result>( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdConvertCooperativeVectorMatrixNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdConvertCooperativeVectorMatrixNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::convertCooperativeVectorMatrixNV( uint32_t infoCount, + const ConvertCooperativeVectorMatrixInfoNV * pInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdConvertCooperativeVectorMatrixNV( + static_cast<VkCommandBuffer>( m_commandBuffer ), infoCount, reinterpret_cast<const VkConvertCooperativeVectorMatrixInfoNV *>( pInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdConvertCooperativeVectorMatrixNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdConvertCooperativeVectorMatrixNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::convertCooperativeVectorMatrixNV( ArrayProxy<const ConvertCooperativeVectorMatrixInfoNV> const & infos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdConvertCooperativeVectorMatrixNV && "Function <vkCmdConvertCooperativeVectorMatrixNV> requires <VK_NV_cooperative_vector>" ); +# endif + + d.vkCmdConvertCooperativeVectorMatrixNV( m_commandBuffer, infos.size(), reinterpret_cast<const VkConvertCooperativeVectorMatrixInfoNV *>( infos.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_low_latency2 === + + // wrapper function for command vkSetLatencySleepModeNV, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetLatencySleepModeNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setLatencySleepModeNV( SwapchainKHR swapchain, + const LatencySleepModeInfoNV * pSleepModeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkSetLatencySleepModeNV( + static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( pSleepModeInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetLatencySleepModeNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetLatencySleepModeNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::setLatencySleepModeNV( SwapchainKHR swapchain, const LatencySleepModeInfoNV & sleepModeInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetLatencySleepModeNV && "Function <vkSetLatencySleepModeNV> requires <VK_NV_low_latency2>" ); +# endif + + Result result = static_cast<Result>( + d.vkSetLatencySleepModeNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( &sleepModeInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setLatencySleepModeNV" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkLatencySleepNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkLatencySleepNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::latencySleepNV( SwapchainKHR swapchain, + const LatencySleepInfoNV * pSleepInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkLatencySleepNV( + static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( pSleepInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkLatencySleepNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkLatencySleepNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::latencySleepNV( SwapchainKHR swapchain, const LatencySleepInfoNV & sleepInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkLatencySleepNV && "Function <vkLatencySleepNV> requires <VK_NV_low_latency2>" ); +# endif + + Result result = static_cast<Result>( + d.vkLatencySleepNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( &sleepInfo ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::latencySleepNV" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkSetLatencyMarkerNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetLatencyMarkerNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::setLatencyMarkerNV( SwapchainKHR swapchain, const SetLatencyMarkerInfoNV * pLatencyMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkSetLatencyMarkerNV( + static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkSetLatencyMarkerNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetLatencyMarkerNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::setLatencyMarkerNV( SwapchainKHR swapchain, const SetLatencyMarkerInfoNV & latencyMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetLatencyMarkerNV && "Function <vkSetLatencyMarkerNV> requires <VK_NV_low_latency2>" ); +# endif + + d.vkSetLatencyMarkerNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetLatencyTimingsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetLatencyTimingsNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::getLatencyTimingsNV( SwapchainKHR swapchain, GetLatencyMarkerInfoNV * pLatencyMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetLatencyTimingsNV( + static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetLatencyTimingsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetLatencyTimingsNV.html + template <typename LatencyTimingsFrameReportNVAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename LatencyTimingsFrameReportNVAllocator::value_type, LatencyTimingsFrameReportNV>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<LatencyTimingsFrameReportNV, LatencyTimingsFrameReportNVAllocator> + Device::getLatencyTimingsNV( SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetLatencyTimingsNV && "Function <vkGetLatencyTimingsNV> requires <VK_NV_low_latency2>" ); +# endif + + std::vector<LatencyTimingsFrameReportNV, LatencyTimingsFrameReportNVAllocator> timings; + GetLatencyMarkerInfoNV latencyMarkerInfo; + d.vkGetLatencyTimingsNV( m_device, 
static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) ); + timings.resize( latencyMarkerInfo.timingCount ); + latencyMarkerInfo.pTimings = timings.data(); + d.vkGetLatencyTimingsNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) ); + + return timings; + } + + // wrapper function for command vkGetLatencyTimingsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetLatencyTimingsNV.html + template <typename LatencyTimingsFrameReportNVAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename LatencyTimingsFrameReportNVAllocator::value_type, LatencyTimingsFrameReportNV>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<LatencyTimingsFrameReportNV, LatencyTimingsFrameReportNVAllocator> + Device::getLatencyTimingsNV( SwapchainKHR swapchain, LatencyTimingsFrameReportNVAllocator & latencyTimingsFrameReportNVAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetLatencyTimingsNV && "Function <vkGetLatencyTimingsNV> requires <VK_NV_low_latency2>" ); +# endif + + std::vector<LatencyTimingsFrameReportNV, LatencyTimingsFrameReportNVAllocator> timings( latencyTimingsFrameReportNVAllocator ); + GetLatencyMarkerInfoNV latencyMarkerInfo; + d.vkGetLatencyTimingsNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) ); + timings.resize( latencyMarkerInfo.timingCount ); + latencyMarkerInfo.pTimings = timings.data(); + d.vkGetLatencyTimingsNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) ); + + return timings; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkQueueNotifyOutOfBandNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueNotifyOutOfBandNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const OutOfBandQueueTypeInfoNV * pQueueTypeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkQueueNotifyOutOfBandNV( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( pQueueTypeInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueNotifyOutOfBandNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueNotifyOutOfBandNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const OutOfBandQueueTypeInfoNV & queueTypeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueNotifyOutOfBandNV && "Function <vkQueueNotifyOutOfBandNV> requires <VK_NV_low_latency2>" ); +# endif + + d.vkQueueNotifyOutOfBandNV( m_queue, reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( &queueTypeInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_cooperative_matrix === + + // 
wrapper function for command vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesKHR( uint32_t * pPropertyCount, + CooperativeMatrixPropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR.html + template < + typename CooperativeMatrixPropertiesKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename CooperativeMatrixPropertiesKHRAllocator::value_type, CooperativeMatrixPropertiesKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator>>::type + PhysicalDevice::getCooperativeMatrixPropertiesKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR && + "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR> requires <VK_KHR_cooperative_matrix>" ); +# endif + + std::vector<CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator> properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR.html + template < + typename CooperativeMatrixPropertiesKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename CooperativeMatrixPropertiesKHRAllocator::value_type, CooperativeMatrixPropertiesKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<std::vector<CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator>>::type + PhysicalDevice::getCooperativeMatrixPropertiesKHR( CooperativeMatrixPropertiesKHRAllocator & cooperativeMatrixPropertiesKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR && + "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR> requires <VK_KHR_cooperative_matrix>" ); +# endif + + std::vector<CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator> properties( cooperativeMatrixPropertiesKHRAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_ARM_data_graph === + + // wrapper function for command vkCreateDataGraphPipelinesARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDataGraphPipelinesARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDataGraphPipelinesARM( DeferredOperationKHR deferredOperation, + PipelineCache pipelineCache, + uint32_t createInfoCount, + const DataGraphPipelineCreateInfoARM * pCreateInfos, + const AllocationCallbacks * pAllocator, + Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateDataGraphPipelinesARM( static_cast<VkDevice>( m_device ), + static_cast<VkDeferredOperationKHR>( deferredOperation ), + static_cast<VkPipelineCache>( pipelineCache ), + createInfoCount, + reinterpret_cast<const VkDataGraphPipelineCreateInfoARM *>( pCreateInfos ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkPipeline *>( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDataGraphPipelinesARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDataGraphPipelinesARM.html + template <typename PipelineAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, Pipeline>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> + Device::createDataGraphPipelinesARM( DeferredOperationKHR deferredOperation, + PipelineCache pipelineCache, + ArrayProxy<const DataGraphPipelineCreateInfoARM> const & createInfos, 
+ Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDataGraphPipelinesARM && "Function <vkCreateDataGraphPipelinesARM> requires <VK_ARM_data_graph>" ); +# endif + + std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() ); + Result result = static_cast<Result>( d.vkCreateDataGraphPipelinesARM( m_device, + static_cast<VkDeferredOperationKHR>( deferredOperation ), + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkDataGraphPipelineCreateInfoARM *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDataGraphPipelinesARM", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<std::vector<Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); + } + + // wrapper function for command vkCreateDataGraphPipelinesARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDataGraphPipelinesARM.html + template <typename PipelineAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, Pipeline>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> + Device::createDataGraphPipelinesARM( DeferredOperationKHR deferredOperation, + PipelineCache pipelineCache, + ArrayProxy<const DataGraphPipelineCreateInfoARM> const & createInfos, + Optional<const AllocationCallbacks> allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDataGraphPipelinesARM && "Function <vkCreateDataGraphPipelinesARM> requires <VK_ARM_data_graph>" ); +# endif + + std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); + Result result = static_cast<Result>( d.vkCreateDataGraphPipelinesARM( m_device, + static_cast<VkDeferredOperationKHR>( deferredOperation ), + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkDataGraphPipelineCreateInfoARM *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDataGraphPipelinesARM", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<std::vector<Pipeline, PipelineAllocator>>( result, std::move( pipelines ) ); + } + + // wrapper function for command vkCreateDataGraphPipelinesARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDataGraphPipelinesARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createDataGraphPipelineARM( DeferredOperationKHR deferredOperation, + PipelineCache pipelineCache, + const DataGraphPipelineCreateInfoARM & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) 
const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDataGraphPipelinesARM && "Function <vkCreateDataGraphPipelinesARM> requires <VK_ARM_data_graph>" ); +# endif + + Pipeline pipeline; + Result result = static_cast<Result>( d.vkCreateDataGraphPipelinesARM( m_device, + static_cast<VkDeferredOperationKHR>( deferredOperation ), + static_cast<VkPipelineCache>( pipelineCache ), + 1, + reinterpret_cast<const VkDataGraphPipelineCreateInfoARM *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( &pipeline ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDataGraphPipelineARM", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<Pipeline>( result, std::move( pipeline ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDataGraphPipelinesARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDataGraphPipelinesARM.html + template <typename Dispatch, + typename PipelineAllocator, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> + Device::createDataGraphPipelinesARMUnique( DeferredOperationKHR deferredOperation, + PipelineCache pipelineCache, + ArrayProxy<const DataGraphPipelineCreateInfoARM> const & createInfos, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDataGraphPipelinesARM && "Function <vkCreateDataGraphPipelinesARM> requires <VK_ARM_data_graph>" ); +# endif + + std::vector<Pipeline> pipelines( createInfos.size() ); + Result result = static_cast<Result>( d.vkCreateDataGraphPipelinesARM( m_device, + static_cast<VkDeferredOperationKHR>( deferredOperation ), + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkDataGraphPipelineCreateInfoARM *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDataGraphPipelinesARMUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); + } + return ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + // wrapper function for command vkCreateDataGraphPipelinesARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDataGraphPipelinesARM.html + template <typename Dispatch, + typename PipelineAllocator, + typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type, + typename 
std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> + Device::createDataGraphPipelinesARMUnique( DeferredOperationKHR deferredOperation, + PipelineCache pipelineCache, + ArrayProxy<const DataGraphPipelineCreateInfoARM> const & createInfos, + Optional<const AllocationCallbacks> allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDataGraphPipelinesARM && "Function <vkCreateDataGraphPipelinesARM> requires <VK_ARM_data_graph>" ); +# endif + + std::vector<Pipeline> pipelines( createInfos.size() ); + Result result = static_cast<Result>( d.vkCreateDataGraphPipelinesARM( m_device, + static_cast<VkDeferredOperationKHR>( deferredOperation ), + static_cast<VkPipelineCache>( pipelineCache ), + createInfos.size(), + reinterpret_cast<const VkDataGraphPipelineCreateInfoARM *>( createInfos.data() ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( pipelines.data() ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDataGraphPipelinesARMUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); + } + return ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + // wrapper function for command vkCreateDataGraphPipelinesARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDataGraphPipelinesARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> + Device::createDataGraphPipelineARMUnique( DeferredOperationKHR deferredOperation, + PipelineCache pipelineCache, + const DataGraphPipelineCreateInfoARM & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDataGraphPipelinesARM && "Function <vkCreateDataGraphPipelinesARM> requires <VK_ARM_data_graph>" ); +# endif + + Pipeline pipeline; + Result result = static_cast<Result>( d.vkCreateDataGraphPipelinesARM( m_device, + static_cast<VkDeferredOperationKHR>( deferredOperation ), + static_cast<VkPipelineCache>( pipelineCache ), + 1, + reinterpret_cast<const VkDataGraphPipelineCreateInfoARM *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkPipeline *>( &pipeline ) ) ); + detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDataGraphPipelineARMUnique", { Result::eSuccess, Result::ePipelineCompileRequiredEXT } ); + + return ResultValue<UniqueHandle<Pipeline, Dispatch>>( + result, UniqueHandle<Pipeline, Dispatch>( pipeline, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } 
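  // --------------------------------------------------------------------------------------------
  // Editorial note (not part of the generated header): a minimal usage sketch for the
  // VK_ARM_data_graph pipeline-creation wrappers above. It assumes a device created with the
  // extension enabled, a filled DataGraphPipelineCreateInfoARM, and the usual default arguments
  // for the allocator and dispatcher; every name on the right-hand side of "..." is a
  // placeholder, not something defined in this header.
  //
  //   VULKAN_HPP_NAMESPACE::Device                         device     = ...;
  //   VULKAN_HPP_NAMESPACE::DataGraphPipelineCreateInfoARM createInfo = ...;
  //
  //   // single-pipeline convenience overload; returns ResultValue<Pipeline> because
  //   // ePipelineCompileRequiredEXT is a possible success code
  //   auto rv = device.createDataGraphPipelineARM( {}, {}, createInfo );
  //   if ( ( rv.result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
  //        ( rv.result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
  //   {
  //     VULKAN_HPP_NAMESPACE::Pipeline pipeline = rv.value;
  //     // ... use the pipeline; destroy it with device.destroyPipeline() when done,
  //     // or prefer the *Unique overload above to get RAII ownership instead.
  //   }
  // --------------------------------------------------------------------------------------------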
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateDataGraphPipelineSessionARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDataGraphPipelineSessionARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDataGraphPipelineSessionARM( const DataGraphPipelineSessionCreateInfoARM * pCreateInfo, + const AllocationCallbacks * pAllocator, + DataGraphPipelineSessionARM * pSession, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateDataGraphPipelineSessionARM( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDataGraphPipelineSessionCreateInfoARM *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkDataGraphPipelineSessionARM *>( pSession ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateDataGraphPipelineSessionARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDataGraphPipelineSessionARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataGraphPipelineSessionARM>::type Device::createDataGraphPipelineSessionARM( + const DataGraphPipelineSessionCreateInfoARM & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDataGraphPipelineSessionARM && "Function <vkCreateDataGraphPipelineSessionARM> requires <VK_ARM_data_graph>" ); +# endif + + DataGraphPipelineSessionARM session; + Result result = + static_cast<Result>( d.vkCreateDataGraphPipelineSessionARM( m_device, + reinterpret_cast<const VkDataGraphPipelineSessionCreateInfoARM *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkDataGraphPipelineSessionARM *>( &session ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDataGraphPipelineSessionARM" ); + + return detail::createResultValueType( result, std::move( session ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateDataGraphPipelineSessionARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDataGraphPipelineSessionARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DataGraphPipelineSessionARM, Dispatch>>::type + Device::createDataGraphPipelineSessionARMUnique( const DataGraphPipelineSessionCreateInfoARM & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDataGraphPipelineSessionARM && "Function <vkCreateDataGraphPipelineSessionARM> requires <VK_ARM_data_graph>" ); +# endif + + DataGraphPipelineSessionARM session; + Result result = + static_cast<Result>( d.vkCreateDataGraphPipelineSessionARM( m_device, + 
reinterpret_cast<const VkDataGraphPipelineSessionCreateInfoARM *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkDataGraphPipelineSessionARM *>( &session ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDataGraphPipelineSessionARMUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<DataGraphPipelineSessionARM, Dispatch>( session, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDataGraphPipelineSessionBindPointRequirementsARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDataGraphPipelineSessionBindPointRequirementsARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getDataGraphPipelineSessionBindPointRequirementsARM( const DataGraphPipelineSessionBindPointRequirementsInfoARM * pInfo, + uint32_t * pBindPointRequirementCount, + DataGraphPipelineSessionBindPointRequirementARM * pBindPointRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetDataGraphPipelineSessionBindPointRequirementsARM( + static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDataGraphPipelineSessionBindPointRequirementsInfoARM *>( pInfo ), + pBindPointRequirementCount, + reinterpret_cast<VkDataGraphPipelineSessionBindPointRequirementARM *>( pBindPointRequirements ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDataGraphPipelineSessionBindPointRequirementsARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDataGraphPipelineSessionBindPointRequirementsARM.html + template <typename DataGraphPipelineSessionBindPointRequirementARMAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DataGraphPipelineSessionBindPointRequirementARMAllocator::value_type, + DataGraphPipelineSessionBindPointRequirementARM>::value, + int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::vector<DataGraphPipelineSessionBindPointRequirementARM, DataGraphPipelineSessionBindPointRequirementARMAllocator>>::type + Device::getDataGraphPipelineSessionBindPointRequirementsARM( const DataGraphPipelineSessionBindPointRequirementsInfoARM & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDataGraphPipelineSessionBindPointRequirementsARM && + "Function <vkGetDataGraphPipelineSessionBindPointRequirementsARM> requires <VK_ARM_data_graph>" ); +# endif + + std::vector<DataGraphPipelineSessionBindPointRequirementARM, DataGraphPipelineSessionBindPointRequirementARMAllocator> bindPointRequirements; + uint32_t bindPointRequirementCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetDataGraphPipelineSessionBindPointRequirementsARM( + m_device, reinterpret_cast<const VkDataGraphPipelineSessionBindPointRequirementsInfoARM *>( &info ), &bindPointRequirementCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && bindPointRequirementCount ) + { + bindPointRequirements.resize( 
bindPointRequirementCount ); + result = static_cast<Result>( d.vkGetDataGraphPipelineSessionBindPointRequirementsARM( + m_device, + reinterpret_cast<const VkDataGraphPipelineSessionBindPointRequirementsInfoARM *>( &info ), + &bindPointRequirementCount, + reinterpret_cast<VkDataGraphPipelineSessionBindPointRequirementARM *>( bindPointRequirements.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getDataGraphPipelineSessionBindPointRequirementsARM" ); + VULKAN_HPP_ASSERT( bindPointRequirementCount <= bindPointRequirements.size() ); + if ( bindPointRequirementCount < bindPointRequirements.size() ) + { + bindPointRequirements.resize( bindPointRequirementCount ); + } + return detail::createResultValueType( result, std::move( bindPointRequirements ) ); + } + + // wrapper function for command vkGetDataGraphPipelineSessionBindPointRequirementsARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDataGraphPipelineSessionBindPointRequirementsARM.html + template <typename DataGraphPipelineSessionBindPointRequirementARMAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DataGraphPipelineSessionBindPointRequirementARMAllocator::value_type, + DataGraphPipelineSessionBindPointRequirementARM>::value, + int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::vector<DataGraphPipelineSessionBindPointRequirementARM, DataGraphPipelineSessionBindPointRequirementARMAllocator>>::type + Device::getDataGraphPipelineSessionBindPointRequirementsARM( + const DataGraphPipelineSessionBindPointRequirementsInfoARM & info, + DataGraphPipelineSessionBindPointRequirementARMAllocator & dataGraphPipelineSessionBindPointRequirementARMAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDataGraphPipelineSessionBindPointRequirementsARM && + "Function <vkGetDataGraphPipelineSessionBindPointRequirementsARM> requires <VK_ARM_data_graph>" ); +# endif + + std::vector<DataGraphPipelineSessionBindPointRequirementARM, DataGraphPipelineSessionBindPointRequirementARMAllocator> bindPointRequirements( + dataGraphPipelineSessionBindPointRequirementARMAllocator ); + uint32_t bindPointRequirementCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetDataGraphPipelineSessionBindPointRequirementsARM( + m_device, reinterpret_cast<const VkDataGraphPipelineSessionBindPointRequirementsInfoARM *>( &info ), &bindPointRequirementCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && bindPointRequirementCount ) + { + bindPointRequirements.resize( bindPointRequirementCount ); + result = static_cast<Result>( d.vkGetDataGraphPipelineSessionBindPointRequirementsARM( + m_device, + reinterpret_cast<const VkDataGraphPipelineSessionBindPointRequirementsInfoARM *>( &info ), + &bindPointRequirementCount, + reinterpret_cast<VkDataGraphPipelineSessionBindPointRequirementARM *>( bindPointRequirements.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getDataGraphPipelineSessionBindPointRequirementsARM" ); + VULKAN_HPP_ASSERT( bindPointRequirementCount <= bindPointRequirements.size() ); + if ( bindPointRequirementCount < bindPointRequirements.size() ) + { + bindPointRequirements.resize( 
bindPointRequirementCount ); + } + return detail::createResultValueType( result, std::move( bindPointRequirements ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDataGraphPipelineSessionMemoryRequirementsARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDataGraphPipelineSessionMemoryRequirementsARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getDataGraphPipelineSessionMemoryRequirementsARM( const DataGraphPipelineSessionMemoryRequirementsInfoARM * pInfo, + MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDataGraphPipelineSessionMemoryRequirementsARM( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDataGraphPipelineSessionMemoryRequirementsInfoARM *>( pInfo ), + reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDataGraphPipelineSessionMemoryRequirementsARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDataGraphPipelineSessionMemoryRequirementsARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE MemoryRequirements2 Device::getDataGraphPipelineSessionMemoryRequirementsARM( + const DataGraphPipelineSessionMemoryRequirementsInfoARM & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDataGraphPipelineSessionMemoryRequirementsARM && + "Function <vkGetDataGraphPipelineSessionMemoryRequirementsARM> requires <VK_ARM_data_graph>" ); +# endif + + MemoryRequirements2 memoryRequirements; + d.vkGetDataGraphPipelineSessionMemoryRequirementsARM( m_device, + reinterpret_cast<const VkDataGraphPipelineSessionMemoryRequirementsInfoARM *>( &info ), + reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetDataGraphPipelineSessionMemoryRequirementsARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDataGraphPipelineSessionMemoryRequirementsARM.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> + Device::getDataGraphPipelineSessionMemoryRequirementsARM( const DataGraphPipelineSessionMemoryRequirementsInfoARM & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDataGraphPipelineSessionMemoryRequirementsARM && + "Function <vkGetDataGraphPipelineSessionMemoryRequirementsARM> requires <VK_ARM_data_graph>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + MemoryRequirements2 & memoryRequirements = structureChain.template get<MemoryRequirements2>(); + d.vkGetDataGraphPipelineSessionMemoryRequirementsARM( m_device, + reinterpret_cast<const VkDataGraphPipelineSessionMemoryRequirementsInfoARM *>( &info ), + reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkBindDataGraphPipelineSessionMemoryARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindDataGraphPipelineSessionMemoryARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindDataGraphPipelineSessionMemoryARM( uint32_t bindInfoCount, + const BindDataGraphPipelineSessionMemoryInfoARM * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkBindDataGraphPipelineSessionMemoryARM( + static_cast<VkDevice>( m_device ), bindInfoCount, reinterpret_cast<const VkBindDataGraphPipelineSessionMemoryInfoARM *>( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkBindDataGraphPipelineSessionMemoryARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindDataGraphPipelineSessionMemoryARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::bindDataGraphPipelineSessionMemoryARM( ArrayProxy<const BindDataGraphPipelineSessionMemoryInfoARM> const & bindInfos, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindDataGraphPipelineSessionMemoryARM && "Function <vkBindDataGraphPipelineSessionMemoryARM> requires <VK_ARM_data_graph>" ); +# endif + + Result result = static_cast<Result>( d.vkBindDataGraphPipelineSessionMemoryARM( + m_device, bindInfos.size(), reinterpret_cast<const VkBindDataGraphPipelineSessionMemoryInfoARM *>( bindInfos.data() ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindDataGraphPipelineSessionMemoryARM" ); + + return detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDataGraphPipelineSessionARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDataGraphPipelineSessionARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void 
Device::destroyDataGraphPipelineSessionARM( DataGraphPipelineSessionARM session, + const AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDataGraphPipelineSessionARM( + static_cast<VkDevice>( m_device ), static_cast<VkDataGraphPipelineSessionARM>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDataGraphPipelineSessionARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDataGraphPipelineSessionARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyDataGraphPipelineSessionARM( DataGraphPipelineSessionARM session, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDataGraphPipelineSessionARM && "Function <vkDestroyDataGraphPipelineSessionARM> requires <VK_ARM_data_graph>" ); +# endif + + d.vkDestroyDataGraphPipelineSessionARM( + m_device, static_cast<VkDataGraphPipelineSessionARM>( session ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyDataGraphPipelineSessionARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDataGraphPipelineSessionARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( DataGraphPipelineSessionARM session, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDataGraphPipelineSessionARM( + static_cast<VkDevice>( m_device ), static_cast<VkDataGraphPipelineSessionARM>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyDataGraphPipelineSessionARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDataGraphPipelineSessionARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( DataGraphPipelineSessionARM session, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDataGraphPipelineSessionARM && "Function <vkDestroyDataGraphPipelineSessionARM> requires <VK_ARM_data_graph>" ); +# endif + + d.vkDestroyDataGraphPipelineSessionARM( + m_device, static_cast<VkDataGraphPipelineSessionARM>( session ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDispatchDataGraphARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchDataGraphARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void 
CommandBuffer::dispatchDataGraphARM( DataGraphPipelineSessionARM session, + const DataGraphPipelineDispatchInfoARM * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchDataGraphARM( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkDataGraphPipelineSessionARM>( session ), + reinterpret_cast<const VkDataGraphPipelineDispatchInfoARM *>( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDispatchDataGraphARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchDataGraphARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::dispatchDataGraphARM( DataGraphPipelineSessionARM session, + Optional<const DataGraphPipelineDispatchInfoARM> info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDispatchDataGraphARM && "Function <vkCmdDispatchDataGraphARM> requires <VK_ARM_data_graph>" ); +# endif + + d.vkCmdDispatchDataGraphARM( + m_commandBuffer, static_cast<VkDataGraphPipelineSessionARM>( session ), reinterpret_cast<const VkDataGraphPipelineDispatchInfoARM *>( info.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDataGraphPipelineAvailablePropertiesARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDataGraphPipelineAvailablePropertiesARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDataGraphPipelineAvailablePropertiesARM( const DataGraphPipelineInfoARM * pPipelineInfo, + uint32_t * pPropertiesCount, + DataGraphPipelinePropertyARM * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetDataGraphPipelineAvailablePropertiesARM( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDataGraphPipelineInfoARM *>( pPipelineInfo ), + pPropertiesCount, + reinterpret_cast<VkDataGraphPipelinePropertyARM *>( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetDataGraphPipelineAvailablePropertiesARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDataGraphPipelineAvailablePropertiesARM.html + template <typename DataGraphPipelinePropertyARMAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DataGraphPipelinePropertyARMAllocator::value_type, DataGraphPipelinePropertyARM>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataGraphPipelinePropertyARM, DataGraphPipelinePropertyARMAllocator>>::type + Device::getDataGraphPipelineAvailablePropertiesARM( const DataGraphPipelineInfoARM & pipelineInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDataGraphPipelineAvailablePropertiesARM && + "Function <vkGetDataGraphPipelineAvailablePropertiesARM> requires <VK_ARM_data_graph>" ); +# endif + + 
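    // Editorial note (not part of the generated header): the body below follows the standard
    // Vulkan two-call enumeration pattern — query the element count with a null data pointer,
    // resize the output vector, query again with the data pointer, and repeat while the driver
    // returns Result::eIncomplete, since the count can change between calls. The other
    // enumeration wrappers in this section (cooperative matrix properties, bind point
    // requirements, queue family data graph properties) use the same loop.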
std::vector<DataGraphPipelinePropertyARM, DataGraphPipelinePropertyARMAllocator> properties; + uint32_t propertiesCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetDataGraphPipelineAvailablePropertiesARM( + m_device, reinterpret_cast<const VkDataGraphPipelineInfoARM *>( &pipelineInfo ), &propertiesCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertiesCount ) + { + properties.resize( propertiesCount ); + result = + static_cast<Result>( d.vkGetDataGraphPipelineAvailablePropertiesARM( m_device, + reinterpret_cast<const VkDataGraphPipelineInfoARM *>( &pipelineInfo ), + &propertiesCount, + reinterpret_cast<VkDataGraphPipelinePropertyARM *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getDataGraphPipelineAvailablePropertiesARM" ); + VULKAN_HPP_ASSERT( propertiesCount <= properties.size() ); + if ( propertiesCount < properties.size() ) + { + properties.resize( propertiesCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetDataGraphPipelineAvailablePropertiesARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDataGraphPipelineAvailablePropertiesARM.html + template <typename DataGraphPipelinePropertyARMAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename DataGraphPipelinePropertyARMAllocator::value_type, DataGraphPipelinePropertyARM>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataGraphPipelinePropertyARM, DataGraphPipelinePropertyARMAllocator>>::type + Device::getDataGraphPipelineAvailablePropertiesARM( const DataGraphPipelineInfoARM & pipelineInfo, + DataGraphPipelinePropertyARMAllocator & dataGraphPipelinePropertyARMAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDataGraphPipelineAvailablePropertiesARM && + "Function <vkGetDataGraphPipelineAvailablePropertiesARM> requires <VK_ARM_data_graph>" ); +# endif + + std::vector<DataGraphPipelinePropertyARM, DataGraphPipelinePropertyARMAllocator> properties( dataGraphPipelinePropertyARMAllocator ); + uint32_t propertiesCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetDataGraphPipelineAvailablePropertiesARM( + m_device, reinterpret_cast<const VkDataGraphPipelineInfoARM *>( &pipelineInfo ), &propertiesCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertiesCount ) + { + properties.resize( propertiesCount ); + result = + static_cast<Result>( d.vkGetDataGraphPipelineAvailablePropertiesARM( m_device, + reinterpret_cast<const VkDataGraphPipelineInfoARM *>( &pipelineInfo ), + &propertiesCount, + reinterpret_cast<VkDataGraphPipelinePropertyARM *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getDataGraphPipelineAvailablePropertiesARM" ); + VULKAN_HPP_ASSERT( propertiesCount <= properties.size() ); + if ( propertiesCount < properties.size() ) + { + properties.resize( propertiesCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetDataGraphPipelinePropertiesARM, see + 
// https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDataGraphPipelinePropertiesARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDataGraphPipelinePropertiesARM( const DataGraphPipelineInfoARM * pPipelineInfo, + uint32_t propertiesCount, + DataGraphPipelinePropertyQueryResultARM * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetDataGraphPipelinePropertiesARM( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkDataGraphPipelineInfoARM *>( pPipelineInfo ), + propertiesCount, + reinterpret_cast<VkDataGraphPipelinePropertyQueryResultARM *>( pProperties ) ) ); + } + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyDataGraphPropertiesARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyDataGraphPropertiesARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getQueueFamilyDataGraphPropertiesARM( uint32_t queueFamilyIndex, + uint32_t * pQueueFamilyDataGraphPropertyCount, + QueueFamilyDataGraphPropertiesARM * pQueueFamilyDataGraphProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkGetPhysicalDeviceQueueFamilyDataGraphPropertiesARM( static_cast<VkPhysicalDevice>( m_physicalDevice ), + queueFamilyIndex, + pQueueFamilyDataGraphPropertyCount, + reinterpret_cast<VkQueueFamilyDataGraphPropertiesARM *>( pQueueFamilyDataGraphProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceQueueFamilyDataGraphPropertiesARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyDataGraphPropertiesARM.html + template < + typename QueueFamilyDataGraphPropertiesARMAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename QueueFamilyDataGraphPropertiesARMAllocator::value_type, QueueFamilyDataGraphPropertiesARM>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::vector<QueueFamilyDataGraphPropertiesARM, QueueFamilyDataGraphPropertiesARMAllocator>>::type + PhysicalDevice::getQueueFamilyDataGraphPropertiesARM( uint32_t queueFamilyIndex, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyDataGraphPropertiesARM && + "Function <vkGetPhysicalDeviceQueueFamilyDataGraphPropertiesARM> requires <VK_ARM_data_graph>" ); +# endif + + std::vector<QueueFamilyDataGraphPropertiesARM, QueueFamilyDataGraphPropertiesARMAllocator> queueFamilyDataGraphProperties; + uint32_t queueFamilyDataGraphPropertyCount; + Result result; + do + { + result = static_cast<Result>( + d.vkGetPhysicalDeviceQueueFamilyDataGraphPropertiesARM( m_physicalDevice, queueFamilyIndex, &queueFamilyDataGraphPropertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && queueFamilyDataGraphPropertyCount ) + { + queueFamilyDataGraphProperties.resize( queueFamilyDataGraphPropertyCount ); + result 
= static_cast<Result>( d.vkGetPhysicalDeviceQueueFamilyDataGraphPropertiesARM( + m_physicalDevice, + queueFamilyIndex, + &queueFamilyDataGraphPropertyCount, + reinterpret_cast<VkQueueFamilyDataGraphPropertiesARM *>( queueFamilyDataGraphProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getQueueFamilyDataGraphPropertiesARM" ); + VULKAN_HPP_ASSERT( queueFamilyDataGraphPropertyCount <= queueFamilyDataGraphProperties.size() ); + if ( queueFamilyDataGraphPropertyCount < queueFamilyDataGraphProperties.size() ) + { + queueFamilyDataGraphProperties.resize( queueFamilyDataGraphPropertyCount ); + } + return detail::createResultValueType( result, std::move( queueFamilyDataGraphProperties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceQueueFamilyDataGraphPropertiesARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyDataGraphPropertiesARM.html + template < + typename QueueFamilyDataGraphPropertiesARMAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename QueueFamilyDataGraphPropertiesARMAllocator::value_type, QueueFamilyDataGraphPropertiesARM>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::vector<QueueFamilyDataGraphPropertiesARM, QueueFamilyDataGraphPropertiesARMAllocator>>::type + PhysicalDevice::getQueueFamilyDataGraphPropertiesARM( uint32_t queueFamilyIndex, + QueueFamilyDataGraphPropertiesARMAllocator & queueFamilyDataGraphPropertiesARMAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyDataGraphPropertiesARM && + "Function <vkGetPhysicalDeviceQueueFamilyDataGraphPropertiesARM> requires <VK_ARM_data_graph>" ); +# endif + + std::vector<QueueFamilyDataGraphPropertiesARM, QueueFamilyDataGraphPropertiesARMAllocator> queueFamilyDataGraphProperties( + queueFamilyDataGraphPropertiesARMAllocator ); + uint32_t queueFamilyDataGraphPropertyCount; + Result result; + do + { + result = static_cast<Result>( + d.vkGetPhysicalDeviceQueueFamilyDataGraphPropertiesARM( m_physicalDevice, queueFamilyIndex, &queueFamilyDataGraphPropertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && queueFamilyDataGraphPropertyCount ) + { + queueFamilyDataGraphProperties.resize( queueFamilyDataGraphPropertyCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceQueueFamilyDataGraphPropertiesARM( + m_physicalDevice, + queueFamilyIndex, + &queueFamilyDataGraphPropertyCount, + reinterpret_cast<VkQueueFamilyDataGraphPropertiesARM *>( queueFamilyDataGraphProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getQueueFamilyDataGraphPropertiesARM" ); + VULKAN_HPP_ASSERT( queueFamilyDataGraphPropertyCount <= queueFamilyDataGraphProperties.size() ); + if ( queueFamilyDataGraphPropertyCount < queueFamilyDataGraphProperties.size() ) + { + queueFamilyDataGraphProperties.resize( queueFamilyDataGraphPropertyCount ); + } + return detail::createResultValueType( result, std::move( queueFamilyDataGraphProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command 
vkGetPhysicalDeviceQueueFamilyDataGraphProcessingEnginePropertiesARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyDataGraphProcessingEnginePropertiesARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyDataGraphProcessingEnginePropertiesARM( + const PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM * pQueueFamilyDataGraphProcessingEngineInfo, + QueueFamilyDataGraphProcessingEnginePropertiesARM * pQueueFamilyDataGraphProcessingEngineProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceQueueFamilyDataGraphProcessingEnginePropertiesARM( + static_cast<VkPhysicalDevice>( m_physicalDevice ), + reinterpret_cast<const VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM *>( pQueueFamilyDataGraphProcessingEngineInfo ), + reinterpret_cast<VkQueueFamilyDataGraphProcessingEnginePropertiesARM *>( pQueueFamilyDataGraphProcessingEngineProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceQueueFamilyDataGraphProcessingEnginePropertiesARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceQueueFamilyDataGraphProcessingEnginePropertiesARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE QueueFamilyDataGraphProcessingEnginePropertiesARM PhysicalDevice::getQueueFamilyDataGraphProcessingEnginePropertiesARM( + const PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM & queueFamilyDataGraphProcessingEngineInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyDataGraphProcessingEnginePropertiesARM && + "Function <vkGetPhysicalDeviceQueueFamilyDataGraphProcessingEnginePropertiesARM> requires <VK_ARM_data_graph>" ); +# endif + + QueueFamilyDataGraphProcessingEnginePropertiesARM queueFamilyDataGraphProcessingEngineProperties; + d.vkGetPhysicalDeviceQueueFamilyDataGraphProcessingEnginePropertiesARM( + m_physicalDevice, + reinterpret_cast<const VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM *>( &queueFamilyDataGraphProcessingEngineInfo ), + reinterpret_cast<VkQueueFamilyDataGraphProcessingEnginePropertiesARM *>( &queueFamilyDataGraphProcessingEngineProperties ) ); + + return queueFamilyDataGraphProcessingEngineProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + + // wrapper function for command vkCmdSetAttachmentFeedbackLoopEnableEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetAttachmentFeedbackLoopEnableEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setAttachmentFeedbackLoopEnableEXT( ImageAspectFlags aspectMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetAttachmentFeedbackLoopEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImageAspectFlags>( aspectMask ) ); + } + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + 
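+  // The wrappers in this block are only compiled when the QNX Screen platform macro VK_USE_PLATFORM_SCREEN_QNX is defined.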
//=== VK_QNX_external_memory_screen_buffer === + + // wrapper function for command vkGetScreenBufferPropertiesQNX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetScreenBufferPropertiesQNX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getScreenBufferPropertiesQNX( const struct _screen_buffer * buffer, + ScreenBufferPropertiesQNX * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkGetScreenBufferPropertiesQNX( static_cast<VkDevice>( m_device ), buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( pProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetScreenBufferPropertiesQNX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetScreenBufferPropertiesQNX.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<ScreenBufferPropertiesQNX>::type + Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetScreenBufferPropertiesQNX && "Function <vkGetScreenBufferPropertiesQNX> requires <VK_QNX_external_memory_screen_buffer>" ); +# endif + + ScreenBufferPropertiesQNX properties; + Result result = + static_cast<Result>( d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); + + return detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetScreenBufferPropertiesQNX, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetScreenBufferPropertiesQNX.html + template <typename X, typename Y, typename... 
Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type + Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetScreenBufferPropertiesQNX && "Function <vkGetScreenBufferPropertiesQNX> requires <VK_QNX_external_memory_screen_buffer>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + ScreenBufferPropertiesQNX & properties = structureChain.template get<ScreenBufferPropertiesQNX>(); + Result result = + static_cast<Result>( d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); + + return detail::createResultValueType( result, std::move( structureChain ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_line_rasterization === + + // wrapper function for command vkCmdSetLineStippleKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetLineStippleKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::setLineStippleKHR( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLineStippleKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); + } + + //=== VK_KHR_calibrated_timestamps === + + // wrapper function for command vkGetPhysicalDeviceCalibrateableTimeDomainsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsKHR( uint32_t * pTimeDomainCount, + TimeDomainKHR * pTimeDomains, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( + static_cast<VkPhysicalDevice>( m_physicalDevice ), pTimeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( pTimeDomains ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceCalibrateableTimeDomainsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsKHR.html + template <typename TimeDomainKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename TimeDomainKHRAllocator::value_type, TimeDomainKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<TimeDomainKHR, TimeDomainKHRAllocator>>::type + PhysicalDevice::getCalibrateableTimeDomainsKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( 
d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR && + "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); +# endif + + std::vector<TimeDomainKHR, TimeDomainKHRAllocator> timeDomains; + uint32_t timeDomainCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast<Result>( + d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) + { + timeDomains.resize( timeDomainCount ); + } + return detail::createResultValueType( result, std::move( timeDomains ) ); + } + + // wrapper function for command vkGetPhysicalDeviceCalibrateableTimeDomainsKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsKHR.html + template <typename TimeDomainKHRAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename TimeDomainKHRAllocator::value_type, TimeDomainKHR>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<TimeDomainKHR, TimeDomainKHRAllocator>>::type + PhysicalDevice::getCalibrateableTimeDomainsKHR( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR && + "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); +# endif + + std::vector<TimeDomainKHR, TimeDomainKHRAllocator> timeDomains( timeDomainKHRAllocator ); + uint32_t timeDomainCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast<Result>( + d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) + { + timeDomains.resize( timeDomainCount ); + } + return detail::createResultValueType( result, std::move( timeDomains ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetCalibratedTimestampsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::getCalibratedTimestampsKHR( uint32_t timestampCount, + const CalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetCalibratedTimestampsKHR( static_cast<VkDevice>( m_device ), + timestampCount, + reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( pTimestampInfos ), + pTimestamps, + pMaxDeviation ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetCalibratedTimestampsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsKHR.html + template <typename Uint64_tAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename Uint64_tAllocator::value_type, uint64_t>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type + Device::getCalibratedTimestampsKHR( ArrayProxy<const CalibratedTimestampInfoKHR> const & timestampInfos, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsKHR && + "Function <vkGetCalibratedTimestampsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); +# endif + + std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_( + std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); + std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first; + uint64_t & maxDeviation = data_.second; + Result result = static_cast<Result>( d.vkGetCalibratedTimestampsKHR( + m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); + + return detail::createResultValueType( result, std::move( data_ ) ); + } + + // wrapper function for command vkGetCalibratedTimestampsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsKHR.html + template <typename Uint64_tAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename Uint64_tAllocator::value_type, uint64_t>::value, int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type + Device::getCalibratedTimestampsKHR( ArrayProxy<const CalibratedTimestampInfoKHR> const & timestampInfos, + Uint64_tAllocator & uint64_tAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsKHR && + "Function <vkGetCalibratedTimestampsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); +# endif + + std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_( + std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) ); + std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first; + 
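+    // data_.first receives one timestamp per element of timestampInfos, data_.second the maximum deviation reported by the implementation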
uint64_t & maxDeviation = data_.second; + Result result = static_cast<Result>( d.vkGetCalibratedTimestampsKHR( + m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); + + return detail::createResultValueType( result, std::move( data_ ) ); + } + + // wrapper function for command vkGetCalibratedTimestampsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetCalibratedTimestampsKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<uint64_t, uint64_t>>::type + Device::getCalibratedTimestampKHR( const CalibratedTimestampInfoKHR & timestampInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsKHR && + "Function <vkGetCalibratedTimestampsKHR> requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" ); +# endif + + std::pair<uint64_t, uint64_t> data_; + uint64_t & timestamp = data_.first; + uint64_t & maxDeviation = data_.second; + Result result = static_cast<Result>( + d.vkGetCalibratedTimestampsKHR( m_device, 1, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( &timestampInfo ), &timestamp, &maxDeviation ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampKHR" ); + + return detail::createResultValueType( result, std::move( data_ ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_maintenance6 === + + // wrapper function for command vkCmdBindDescriptorSets2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2KHR( const BindDescriptorSetsInfo * pBindDescriptorSetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindDescriptorSets2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkBindDescriptorSetsInfo *>( pBindDescriptorSetsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindDescriptorSets2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorSets2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2KHR( const BindDescriptorSetsInfo & bindDescriptorSetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorSets2KHR && "Function <vkCmdBindDescriptorSets2KHR> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" ); +# endif + + d.vkCmdBindDescriptorSets2KHR( m_commandBuffer, reinterpret_cast<const VkBindDescriptorSetsInfo *>( &bindDescriptorSetsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushConstants2KHR, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const PushConstantsInfo * pPushConstantsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushConstants2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkPushConstantsInfo *>( pPushConstantsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushConstants2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushConstants2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const PushConstantsInfo & pushConstantsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushConstants2KHR && "Function <vkCmdPushConstants2KHR> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" ); +# endif + + d.vkCmdPushConstants2KHR( m_commandBuffer, reinterpret_cast<const VkPushConstantsInfo *>( &pushConstantsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushDescriptorSet2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const PushDescriptorSetInfo * pPushDescriptorSetInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSet2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkPushDescriptorSetInfo *>( pPushDescriptorSetInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDescriptorSet2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSet2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const PushDescriptorSetInfo & pushDescriptorSetInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSet2KHR && "Function <vkCmdPushDescriptorSet2KHR> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" ); +# endif + + d.vkCmdPushDescriptorSet2KHR( m_commandBuffer, reinterpret_cast<const VkPushDescriptorSetInfo *>( &pushDescriptorSetInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPushDescriptorSetWithTemplate2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate2KHR( const PushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSetWithTemplate2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkPushDescriptorSetWithTemplateInfo *>( pPushDescriptorSetWithTemplateInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPushDescriptorSetWithTemplate2KHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPushDescriptorSetWithTemplate2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate2KHR( const PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetWithTemplate2KHR && + "Function <vkCmdPushDescriptorSetWithTemplate2KHR> requires <VK_KHR_maintenance6> or <VK_VERSION_1_4>" ); +# endif + + d.vkCmdPushDescriptorSetWithTemplate2KHR( m_commandBuffer, + reinterpret_cast<const VkPushDescriptorSetWithTemplateInfo *>( &pushDescriptorSetWithTemplateInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdSetDescriptorBufferOffsets2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDescriptorBufferOffsets2EXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsets2EXT( const SetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDescriptorBufferOffsets2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( pSetDescriptorBufferOffsetsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdSetDescriptorBufferOffsets2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDescriptorBufferOffsets2EXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsets2EXT( const SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetDescriptorBufferOffsets2EXT && "Function <vkCmdSetDescriptorBufferOffsets2EXT> requires <VK_KHR_maintenance6>" ); +# endif + + d.vkCmdSetDescriptorBufferOffsets2EXT( m_commandBuffer, reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( &setDescriptorBufferOffsetsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBindDescriptorBufferEmbeddedSamplers2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorBufferEmbeddedSamplers2EXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::bindDescriptorBufferEmbeddedSamplers2EXT( const 
BindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( + static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( pBindDescriptorBufferEmbeddedSamplersInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindDescriptorBufferEmbeddedSamplers2EXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindDescriptorBufferEmbeddedSamplers2EXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + CommandBuffer::bindDescriptorBufferEmbeddedSamplers2EXT( const BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT && + "Function <vkCmdBindDescriptorBufferEmbeddedSamplers2EXT> requires <VK_KHR_maintenance6>" ); +# endif + + d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( + m_commandBuffer, reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( &bindDescriptorBufferEmbeddedSamplersInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_QCOM_tile_memory_heap === + + // wrapper function for command vkCmdBindTileMemoryQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindTileMemoryQCOM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindTileMemoryQCOM( const TileMemoryBindInfoQCOM * pTileMemoryBindInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindTileMemoryQCOM( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkTileMemoryBindInfoQCOM *>( pTileMemoryBindInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBindTileMemoryQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindTileMemoryQCOM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::bindTileMemoryQCOM( Optional<const TileMemoryBindInfoQCOM> tileMemoryBindInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindTileMemoryQCOM && "Function <vkCmdBindTileMemoryQCOM> requires <VK_QCOM_tile_memory_heap>" ); +# endif + + d.vkCmdBindTileMemoryQCOM( m_commandBuffer, reinterpret_cast<const VkTileMemoryBindInfoQCOM *>( tileMemoryBindInfo.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_copy_memory_indirect === + + // wrapper function for command vkCmdCopyMemoryIndirectKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryIndirectKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryIndirectKHR( const 
CopyMemoryIndirectInfoKHR * pCopyMemoryIndirectInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyMemoryIndirectKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkCopyMemoryIndirectInfoKHR *>( pCopyMemoryIndirectInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyMemoryIndirectKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryIndirectKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryIndirectKHR( const CopyMemoryIndirectInfoKHR & copyMemoryIndirectInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryIndirectKHR && "Function <vkCmdCopyMemoryIndirectKHR> requires <VK_KHR_copy_memory_indirect>" ); +# endif + + d.vkCmdCopyMemoryIndirectKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryIndirectInfoKHR *>( &copyMemoryIndirectInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdCopyMemoryToImageIndirectKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToImageIndirectKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectKHR( const CopyMemoryToImageIndirectInfoKHR * pCopyMemoryToImageIndirectInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyMemoryToImageIndirectKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkCopyMemoryToImageIndirectInfoKHR *>( pCopyMemoryToImageIndirectInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdCopyMemoryToImageIndirectKHR, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyMemoryToImageIndirectKHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectKHR( const CopyMemoryToImageIndirectInfoKHR & copyMemoryToImageIndirectInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryToImageIndirectKHR && "Function <vkCmdCopyMemoryToImageIndirectKHR> requires <VK_KHR_copy_memory_indirect>" ); +# endif + + d.vkCmdCopyMemoryToImageIndirectKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToImageIndirectInfoKHR *>( &copyMemoryToImageIndirectInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_memory_decompression === + + // wrapper function for command vkCmdDecompressMemoryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecompressMemoryEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryEXT( const DecompressMemoryInfoEXT * pDecompressMemoryInfoEXT, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); + d.vkCmdDecompressMemoryEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkDecompressMemoryInfoEXT *>( pDecompressMemoryInfoEXT ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdDecompressMemoryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecompressMemoryEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryEXT( const DecompressMemoryInfoEXT & decompressMemoryInfoEXT, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDecompressMemoryEXT && "Function <vkCmdDecompressMemoryEXT> requires <VK_EXT_memory_decompression>" ); +# endif + + d.vkCmdDecompressMemoryEXT( m_commandBuffer, reinterpret_cast<const VkDecompressMemoryInfoEXT *>( &decompressMemoryInfoEXT ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdDecompressMemoryIndirectCountEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDecompressMemoryIndirectCountEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryIndirectCountEXT( MemoryDecompressionMethodFlagsEXT decompressionMethod, + DeviceAddress indirectCommandsAddress, + DeviceAddress indirectCommandsCountAddress, + uint32_t maxDecompressionCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDecompressMemoryIndirectCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkMemoryDecompressionMethodFlagsEXT>( decompressionMethod ), + static_cast<VkDeviceAddress>( indirectCommandsAddress ), + static_cast<VkDeviceAddress>( indirectCommandsCountAddress ), + maxDecompressionCount, + stride ); + } + + //=== VK_NV_external_compute_queue === + + // wrapper function for command vkCreateExternalComputeQueueNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExternalComputeQueueNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createExternalComputeQueueNV( const ExternalComputeQueueCreateInfoNV * pCreateInfo, + const AllocationCallbacks * pAllocator, + ExternalComputeQueueNV * pExternalQueue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateExternalComputeQueueNV( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkExternalComputeQueueCreateInfoNV *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkExternalComputeQueueNV *>( pExternalQueue ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateExternalComputeQueueNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExternalComputeQueueNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<ExternalComputeQueueNV>::type 
Device::createExternalComputeQueueNV( + const ExternalComputeQueueCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateExternalComputeQueueNV && "Function <vkCreateExternalComputeQueueNV> requires <VK_NV_external_compute_queue>" ); +# endif + + ExternalComputeQueueNV externalQueue; + Result result = static_cast<Result>( d.vkCreateExternalComputeQueueNV( m_device, + reinterpret_cast<const VkExternalComputeQueueCreateInfoNV *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkExternalComputeQueueNV *>( &externalQueue ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createExternalComputeQueueNV" ); + + return detail::createResultValueType( result, std::move( externalQueue ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateExternalComputeQueueNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateExternalComputeQueueNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ExternalComputeQueueNV, Dispatch>>::type + Device::createExternalComputeQueueNVUnique( const ExternalComputeQueueCreateInfoNV & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateExternalComputeQueueNV && "Function <vkCreateExternalComputeQueueNV> requires <VK_NV_external_compute_queue>" ); +# endif + + ExternalComputeQueueNV externalQueue; + Result result = static_cast<Result>( d.vkCreateExternalComputeQueueNV( m_device, + reinterpret_cast<const VkExternalComputeQueueCreateInfoNV *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkExternalComputeQueueNV *>( &externalQueue ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createExternalComputeQueueNVUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<ExternalComputeQueueNV, Dispatch>( externalQueue, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyExternalComputeQueueNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyExternalComputeQueueNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyExternalComputeQueueNV( ExternalComputeQueueNV externalQueue, + const AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyExternalComputeQueueNV( static_cast<VkDevice>( m_device ), + static_cast<VkExternalComputeQueueNV>( externalQueue ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyExternalComputeQueueNV, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyExternalComputeQueueNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyExternalComputeQueueNV( ExternalComputeQueueNV externalQueue, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyExternalComputeQueueNV && "Function <vkDestroyExternalComputeQueueNV> requires <VK_NV_external_compute_queue>" ); +# endif + + d.vkDestroyExternalComputeQueueNV( + m_device, static_cast<VkExternalComputeQueueNV>( externalQueue ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyExternalComputeQueueNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyExternalComputeQueueNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( ExternalComputeQueueNV externalQueue, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyExternalComputeQueueNV( static_cast<VkDevice>( m_device ), + static_cast<VkExternalComputeQueueNV>( externalQueue ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyExternalComputeQueueNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyExternalComputeQueueNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( ExternalComputeQueueNV externalQueue, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyExternalComputeQueueNV && "Function <vkDestroyExternalComputeQueueNV> requires <VK_NV_external_compute_queue>" ); +# endif + + d.vkDestroyExternalComputeQueueNV( + m_device, static_cast<VkExternalComputeQueueNV>( externalQueue ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetExternalComputeQueueDataNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExternalComputeQueueDataNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + ExternalComputeQueueNV::getData( ExternalComputeQueueDataParamsNV * params, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetExternalComputeQueueDataNV( + static_cast<VkExternalComputeQueueNV>( m_externalComputeQueueNV ), reinterpret_cast<VkExternalComputeQueueDataParamsNV *>( params ), pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetExternalComputeQueueDataNV, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetExternalComputeQueueDataNV.html + template <typename DataType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<ExternalComputeQueueDataParamsNV, DataType> + ExternalComputeQueueNV::getData( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetExternalComputeQueueDataNV && "Function <vkGetExternalComputeQueueDataNV> requires <VK_NV_external_compute_queue>" ); +# endif + + std::pair<ExternalComputeQueueDataParamsNV, DataType> data_; + ExternalComputeQueueDataParamsNV & params = data_.first; + DataType & data = data_.second; + d.vkGetExternalComputeQueueDataNV( m_externalComputeQueueNV, reinterpret_cast<VkExternalComputeQueueDataParamsNV *>( &params ), &data ); + + return data_; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_cluster_acceleration_structure === + + // wrapper function for command vkGetClusterAccelerationStructureBuildSizesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetClusterAccelerationStructureBuildSizesNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getClusterAccelerationStructureBuildSizesNV( const ClusterAccelerationStructureInputInfoNV * pInfo, + AccelerationStructureBuildSizesInfoKHR * pSizeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetClusterAccelerationStructureBuildSizesNV( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkClusterAccelerationStructureInputInfoNV *>( pInfo ), + reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( pSizeInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetClusterAccelerationStructureBuildSizesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetClusterAccelerationStructureBuildSizesNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE AccelerationStructureBuildSizesInfoKHR + Device::getClusterAccelerationStructureBuildSizesNV( const ClusterAccelerationStructureInputInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetClusterAccelerationStructureBuildSizesNV && + "Function <vkGetClusterAccelerationStructureBuildSizesNV> requires <VK_NV_cluster_acceleration_structure>" ); +# endif + + AccelerationStructureBuildSizesInfoKHR sizeInfo; + d.vkGetClusterAccelerationStructureBuildSizesNV( m_device, + reinterpret_cast<const VkClusterAccelerationStructureInputInfoNV *>( &info ), + reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) ); + + return sizeInfo; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBuildClusterAccelerationStructureIndirectNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildClusterAccelerationStructureIndirectNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE 
void CommandBuffer::buildClusterAccelerationStructureIndirectNV( const ClusterAccelerationStructureCommandsInfoNV * pCommandInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBuildClusterAccelerationStructureIndirectNV( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkClusterAccelerationStructureCommandsInfoNV *>( pCommandInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBuildClusterAccelerationStructureIndirectNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildClusterAccelerationStructureIndirectNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::buildClusterAccelerationStructureIndirectNV( const ClusterAccelerationStructureCommandsInfoNV & commandInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBuildClusterAccelerationStructureIndirectNV && + "Function <vkCmdBuildClusterAccelerationStructureIndirectNV> requires <VK_NV_cluster_acceleration_structure>" ); +# endif + + d.vkCmdBuildClusterAccelerationStructureIndirectNV( m_commandBuffer, + reinterpret_cast<const VkClusterAccelerationStructureCommandsInfoNV *>( &commandInfos ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_partitioned_acceleration_structure === + + // wrapper function for command vkGetPartitionedAccelerationStructuresBuildSizesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPartitionedAccelerationStructuresBuildSizesNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getPartitionedAccelerationStructuresBuildSizesNV( const PartitionedAccelerationStructureInstancesInputNV * pInfo, + AccelerationStructureBuildSizesInfoKHR * pSizeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPartitionedAccelerationStructuresBuildSizesNV( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkPartitionedAccelerationStructureInstancesInputNV *>( pInfo ), + reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( pSizeInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPartitionedAccelerationStructuresBuildSizesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPartitionedAccelerationStructuresBuildSizesNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE AccelerationStructureBuildSizesInfoKHR Device::getPartitionedAccelerationStructuresBuildSizesNV( + const PartitionedAccelerationStructureInstancesInputNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPartitionedAccelerationStructuresBuildSizesNV && + "Function <vkGetPartitionedAccelerationStructuresBuildSizesNV> requires <VK_NV_partitioned_acceleration_structure>" ); +# endif + + AccelerationStructureBuildSizesInfoKHR sizeInfo; + 
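+    // sizeInfo is default-initialized here and filled by the call below with the required build sizes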
d.vkGetPartitionedAccelerationStructuresBuildSizesNV( m_device, + reinterpret_cast<const VkPartitionedAccelerationStructureInstancesInputNV *>( &info ), + reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) ); + + return sizeInfo; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdBuildPartitionedAccelerationStructuresNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildPartitionedAccelerationStructuresNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::buildPartitionedAccelerationStructuresNV( const BuildPartitionedAccelerationStructureInfoNV * pBuildInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBuildPartitionedAccelerationStructuresNV( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkBuildPartitionedAccelerationStructureInfoNV *>( pBuildInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBuildPartitionedAccelerationStructuresNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBuildPartitionedAccelerationStructuresNV.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::buildPartitionedAccelerationStructuresNV( const BuildPartitionedAccelerationStructureInfoNV & buildInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBuildPartitionedAccelerationStructuresNV && + "Function <vkCmdBuildPartitionedAccelerationStructuresNV> requires <VK_NV_partitioned_acceleration_structure>" ); +# endif + + d.vkCmdBuildPartitionedAccelerationStructuresNV( m_commandBuffer, reinterpret_cast<const VkBuildPartitionedAccelerationStructureInfoNV *>( &buildInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_device_generated_commands === + + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsEXT( const GeneratedCommandsMemoryRequirementsInfoEXT * pInfo, + MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetGeneratedCommandsMemoryRequirementsEXT( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoEXT *>( pInfo ), + reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE MemoryRequirements2 + Device::getGeneratedCommandsMemoryRequirementsEXT( const 
GeneratedCommandsMemoryRequirementsInfoEXT & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsEXT && + "Function <vkGetGeneratedCommandsMemoryRequirementsEXT> requires <VK_EXT_device_generated_commands>" ); +# endif + + MemoryRequirements2 memoryRequirements; + d.vkGetGeneratedCommandsMemoryRequirementsEXT( m_device, + reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoEXT *>( &info ), + reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return memoryRequirements; + } + + // wrapper function for command vkGetGeneratedCommandsMemoryRequirementsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetGeneratedCommandsMemoryRequirementsEXT.html + template <typename X, typename Y, typename... Z, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> + Device::getGeneratedCommandsMemoryRequirementsEXT( const GeneratedCommandsMemoryRequirementsInfoEXT & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsEXT && + "Function <vkGetGeneratedCommandsMemoryRequirementsEXT> requires <VK_EXT_device_generated_commands>" ); +# endif + + StructureChain<X, Y, Z...> structureChain; + MemoryRequirements2 & memoryRequirements = structureChain.template get<MemoryRequirements2>(); + d.vkGetGeneratedCommandsMemoryRequirementsEXT( m_device, + reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoEXT *>( &info ), + reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdPreprocessGeneratedCommandsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPreprocessGeneratedCommandsEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsEXT( const GeneratedCommandsInfoEXT * pGeneratedCommandsInfo, + CommandBuffer stateCommandBuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPreprocessGeneratedCommandsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkGeneratedCommandsInfoEXT *>( pGeneratedCommandsInfo ), + static_cast<VkCommandBuffer>( stateCommandBuffer ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdPreprocessGeneratedCommandsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPreprocessGeneratedCommandsEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsEXT( const GeneratedCommandsInfoEXT & generatedCommandsInfo, + CommandBuffer stateCommandBuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPreprocessGeneratedCommandsEXT && 
"Function <vkCmdPreprocessGeneratedCommandsEXT> requires <VK_EXT_device_generated_commands>" ); +# endif + + d.vkCmdPreprocessGeneratedCommandsEXT( + m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoEXT *>( &generatedCommandsInfo ), static_cast<VkCommandBuffer>( stateCommandBuffer ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCmdExecuteGeneratedCommandsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteGeneratedCommandsEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsEXT( Bool32 isPreprocessed, + const GeneratedCommandsInfoEXT * pGeneratedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdExecuteGeneratedCommandsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + static_cast<VkBool32>( isPreprocessed ), + reinterpret_cast<const VkGeneratedCommandsInfoEXT *>( pGeneratedCommandsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdExecuteGeneratedCommandsEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdExecuteGeneratedCommandsEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsEXT( Bool32 isPreprocessed, + const GeneratedCommandsInfoEXT & generatedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdExecuteGeneratedCommandsEXT && "Function <vkCmdExecuteGeneratedCommandsEXT> requires <VK_EXT_device_generated_commands>" ); +# endif + + d.vkCmdExecuteGeneratedCommandsEXT( + m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoEXT *>( &generatedCommandsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectCommandsLayoutEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutEXT( const IndirectCommandsLayoutCreateInfoEXT * pCreateInfo, + const AllocationCallbacks * pAllocator, + IndirectCommandsLayoutEXT * pIndirectCommandsLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateIndirectCommandsLayoutEXT( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoEXT *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkIndirectCommandsLayoutEXT *>( pIndirectCommandsLayout ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectCommandsLayoutEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType<IndirectCommandsLayoutEXT>::type Device::createIndirectCommandsLayoutEXT( + const IndirectCommandsLayoutCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutEXT && "Function <vkCreateIndirectCommandsLayoutEXT> requires <VK_EXT_device_generated_commands>" ); +# endif + + IndirectCommandsLayoutEXT indirectCommandsLayout; + Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutEXT( m_device, + reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoEXT *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkIndirectCommandsLayoutEXT *>( &indirectCommandsLayout ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutEXT" ); + + return detail::createResultValueType( result, std::move( indirectCommandsLayout ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectCommandsLayoutEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<IndirectCommandsLayoutEXT, Dispatch>>::type + Device::createIndirectCommandsLayoutEXTUnique( const IndirectCommandsLayoutCreateInfoEXT & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutEXT && "Function <vkCreateIndirectCommandsLayoutEXT> requires <VK_EXT_device_generated_commands>" ); +# endif + + IndirectCommandsLayoutEXT indirectCommandsLayout; + Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutEXT( m_device, + reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoEXT *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkIndirectCommandsLayoutEXT *>( &indirectCommandsLayout ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutEXTUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<IndirectCommandsLayoutEXT, Dispatch>( indirectCommandsLayout, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutEXT( IndirectCommandsLayoutEXT indirectCommandsLayout, + const AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyIndirectCommandsLayoutEXT( static_cast<VkDevice>( m_device ), + static_cast<VkIndirectCommandsLayoutEXT>( indirectCommandsLayout ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + 
} + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutEXT( IndirectCommandsLayoutEXT indirectCommandsLayout, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutEXT && "Function <vkDestroyIndirectCommandsLayoutEXT> requires <VK_EXT_device_generated_commands>" ); +# endif + + d.vkDestroyIndirectCommandsLayoutEXT( + m_device, static_cast<VkIndirectCommandsLayoutEXT>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( IndirectCommandsLayoutEXT indirectCommandsLayout, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyIndirectCommandsLayoutEXT( static_cast<VkDevice>( m_device ), + static_cast<VkIndirectCommandsLayoutEXT>( indirectCommandsLayout ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyIndirectCommandsLayoutEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectCommandsLayoutEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroy( IndirectCommandsLayoutEXT indirectCommandsLayout, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutEXT && "Function <vkDestroyIndirectCommandsLayoutEXT> requires <VK_EXT_device_generated_commands>" ); +# endif + + d.vkDestroyIndirectCommandsLayoutEXT( + m_device, static_cast<VkIndirectCommandsLayoutEXT>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkCreateIndirectExecutionSetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectExecutionSetEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createIndirectExecutionSetEXT( const IndirectExecutionSetCreateInfoEXT * pCreateInfo, + const AllocationCallbacks * pAllocator, + IndirectExecutionSetEXT * pIndirectExecutionSet, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return 
static_cast<Result>( d.vkCreateIndirectExecutionSetEXT( static_cast<VkDevice>( m_device ), + reinterpret_cast<const VkIndirectExecutionSetCreateInfoEXT *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkIndirectExecutionSetEXT *>( pIndirectExecutionSet ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateIndirectExecutionSetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectExecutionSetEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<IndirectExecutionSetEXT>::type Device::createIndirectExecutionSetEXT( + const IndirectExecutionSetCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIndirectExecutionSetEXT && "Function <vkCreateIndirectExecutionSetEXT> requires <VK_EXT_device_generated_commands>" ); +# endif + + IndirectExecutionSetEXT indirectExecutionSet; + Result result = static_cast<Result>( d.vkCreateIndirectExecutionSetEXT( m_device, + reinterpret_cast<const VkIndirectExecutionSetCreateInfoEXT *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkIndirectExecutionSetEXT *>( &indirectExecutionSet ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectExecutionSetEXT" ); + + return detail::createResultValueType( result, std::move( indirectExecutionSet ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateIndirectExecutionSetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateIndirectExecutionSetEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<IndirectExecutionSetEXT, Dispatch>>::type + Device::createIndirectExecutionSetEXTUnique( const IndirectExecutionSetCreateInfoEXT & createInfo, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIndirectExecutionSetEXT && "Function <vkCreateIndirectExecutionSetEXT> requires <VK_EXT_device_generated_commands>" ); +# endif + + IndirectExecutionSetEXT indirectExecutionSet; + Result result = static_cast<Result>( d.vkCreateIndirectExecutionSetEXT( m_device, + reinterpret_cast<const VkIndirectExecutionSetCreateInfoEXT *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkIndirectExecutionSetEXT *>( &indirectExecutionSet ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectExecutionSetEXTUnique" ); + + return detail::createResultValueType( + result, UniqueHandle<IndirectExecutionSetEXT, Dispatch>( indirectExecutionSet, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyIndirectExecutionSetEXT, see + // 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectExecutionSetEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyIndirectExecutionSetEXT( IndirectExecutionSetEXT indirectExecutionSet, + const AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyIndirectExecutionSetEXT( static_cast<VkDevice>( m_device ), + static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyIndirectExecutionSetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectExecutionSetEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::destroyIndirectExecutionSetEXT( IndirectExecutionSetEXT indirectExecutionSet, + Optional<const AllocationCallbacks> allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyIndirectExecutionSetEXT && "Function <vkDestroyIndirectExecutionSetEXT> requires <VK_EXT_device_generated_commands>" ); +# endif + + d.vkDestroyIndirectExecutionSetEXT( + m_device, static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkDestroyIndirectExecutionSetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectExecutionSetEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( IndirectExecutionSetEXT indirectExecutionSet, const AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyIndirectExecutionSetEXT( static_cast<VkDevice>( m_device ), + static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkDestroyIndirectExecutionSetEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyIndirectExecutionSetEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void + Device::destroy( IndirectExecutionSetEXT indirectExecutionSet, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyIndirectExecutionSetEXT && "Function <vkDestroyIndirectExecutionSetEXT> requires <VK_EXT_device_generated_commands>" ); +# endif + + d.vkDestroyIndirectExecutionSetEXT( + m_device, static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for 
command vkUpdateIndirectExecutionSetPipelineEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateIndirectExecutionSetPipelineEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetPipelineEXT( IndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const WriteIndirectExecutionSetPipelineEXT * pExecutionSetWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUpdateIndirectExecutionSetPipelineEXT( static_cast<VkDevice>( m_device ), + static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), + executionSetWriteCount, + reinterpret_cast<const VkWriteIndirectExecutionSetPipelineEXT *>( pExecutionSetWrites ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUpdateIndirectExecutionSetPipelineEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateIndirectExecutionSetPipelineEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetPipelineEXT( IndirectExecutionSetEXT indirectExecutionSet, + ArrayProxy<const WriteIndirectExecutionSetPipelineEXT> const & executionSetWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUpdateIndirectExecutionSetPipelineEXT && + "Function <vkUpdateIndirectExecutionSetPipelineEXT> requires <VK_EXT_device_generated_commands>" ); +# endif + + d.vkUpdateIndirectExecutionSetPipelineEXT( m_device, + static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), + executionSetWrites.size(), + reinterpret_cast<const VkWriteIndirectExecutionSetPipelineEXT *>( executionSetWrites.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkUpdateIndirectExecutionSetShaderEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateIndirectExecutionSetShaderEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetShaderEXT( IndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const WriteIndirectExecutionSetShaderEXT * pExecutionSetWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUpdateIndirectExecutionSetShaderEXT( static_cast<VkDevice>( m_device ), + static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), + executionSetWriteCount, + reinterpret_cast<const VkWriteIndirectExecutionSetShaderEXT *>( pExecutionSetWrites ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkUpdateIndirectExecutionSetShaderEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateIndirectExecutionSetShaderEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetShaderEXT( IndirectExecutionSetEXT indirectExecutionSet, + ArrayProxy<const WriteIndirectExecutionSetShaderEXT> const & executionSetWrites, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUpdateIndirectExecutionSetShaderEXT && + "Function <vkUpdateIndirectExecutionSetShaderEXT> requires <VK_EXT_device_generated_commands>" ); +# endif + + d.vkUpdateIndirectExecutionSetShaderEXT( m_device, + static_cast<VkIndirectExecutionSetEXT>( indirectExecutionSet ), + executionSetWrites.size(), + reinterpret_cast<const VkWriteIndirectExecutionSetShaderEXT *>( executionSetWrites.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_OHOS ) + //=== VK_OHOS_surface === + + // wrapper function for command vkCreateSurfaceOHOS, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSurfaceOHOS.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createSurfaceOHOS( const SurfaceCreateInfoOHOS * pCreateInfo, + const AllocationCallbacks * pAllocator, + SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkCreateSurfaceOHOS( static_cast<VkInstance>( m_instance ), + reinterpret_cast<const VkSurfaceCreateInfoOHOS *>( pCreateInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), + reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCreateSurfaceOHOS, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSurfaceOHOS.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<SurfaceKHR>::type + Instance::createSurfaceOHOS( const SurfaceCreateInfoOHOS & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSurfaceOHOS && "Function <vkCreateSurfaceOHOS> requires <VK_OHOS_surface>" ); +# endif + + SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateSurfaceOHOS( m_instance, + reinterpret_cast<const VkSurfaceCreateInfoOHOS *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createSurfaceOHOS" ); + + return detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + // wrapper function for command vkCreateSurfaceOHOS, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSurfaceOHOS.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR, Dispatch>>::type + Instance::createSurfaceOHOSUnique( const SurfaceCreateInfoOHOS & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSurfaceOHOS && "Function <vkCreateSurfaceOHOS> requires <VK_OHOS_surface>" ); +# endif + + 
SurfaceKHR surface; + Result result = static_cast<Result>( d.vkCreateSurfaceOHOS( m_instance, + reinterpret_cast<const VkSurfaceCreateInfoOHOS *>( &createInfo ), + reinterpret_cast<const VkAllocationCallbacks *>( allocator.get() ), + reinterpret_cast<VkSurfaceKHR *>( &surface ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createSurfaceOHOSUnique" ); + + return detail::createResultValueType( result, + UniqueHandle<SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_OHOS*/ + +#if defined( VK_USE_PLATFORM_OHOS ) + //=== VK_OHOS_native_buffer === + + // wrapper function for command vkGetSwapchainGrallocUsageOHOS, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainGrallocUsageOHOS.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getSwapchainGrallocUsageOHOS( Format format, ImageUsageFlags imageUsage, uint64_t * grallocUsage, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetSwapchainGrallocUsageOHOS( + static_cast<VkDevice>( m_device ), static_cast<VkFormat>( format ), static_cast<VkImageUsageFlags>( imageUsage ), grallocUsage ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetSwapchainGrallocUsageOHOS, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSwapchainGrallocUsageOHOS.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type + Device::getSwapchainGrallocUsageOHOS( Format format, ImageUsageFlags imageUsage, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSwapchainGrallocUsageOHOS && "Function <vkGetSwapchainGrallocUsageOHOS> requires <VK_OHOS_native_buffer>" ); +# endif + + uint64_t grallocUsage; + Result result = static_cast<Result>( + d.vkGetSwapchainGrallocUsageOHOS( m_device, static_cast<VkFormat>( format ), static_cast<VkImageUsageFlags>( imageUsage ), &grallocUsage ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainGrallocUsageOHOS" ); + + return detail::createResultValueType( result, std::move( grallocUsage ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkAcquireImageOHOS, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireImageOHOS.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::acquireImageOHOS( Image image, int32_t nativeFenceFd, Semaphore semaphore, Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkAcquireImageOHOS( + static_cast<VkDevice>( m_device ), static_cast<VkImage>( image ), nativeFenceFd, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ) ) ); + } +# else + // wrapper function for command 
vkAcquireImageOHOS, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkAcquireImageOHOS.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type + Device::acquireImageOHOS( Image image, int32_t nativeFenceFd, Semaphore semaphore, Fence fence, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquireImageOHOS && "Function <vkAcquireImageOHOS> requires <VK_OHOS_native_buffer>" ); +# endif + + Result result = static_cast<Result>( + d.vkAcquireImageOHOS( m_device, static_cast<VkImage>( image ), nativeFenceFd, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireImageOHOS" ); + + return detail::createResultValueType( result ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + // wrapper function for command vkQueueSignalReleaseImageOHOS, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSignalReleaseImageOHOS.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::signalReleaseImageOHOS( + uint32_t waitSemaphoreCount, const Semaphore * pWaitSemaphores, Image image, int32_t * pNativeFenceFd, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkQueueSignalReleaseImageOHOS( static_cast<VkQueue>( m_queue ), + waitSemaphoreCount, + reinterpret_cast<const VkSemaphore *>( pWaitSemaphores ), + static_cast<VkImage>( image ), + pNativeFenceFd ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkQueueSignalReleaseImageOHOS, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSignalReleaseImageOHOS.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int32_t>::type + Queue::signalReleaseImageOHOS( ArrayProxy<const Semaphore> const & waitSemaphores, Image image, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueSignalReleaseImageOHOS && "Function <vkQueueSignalReleaseImageOHOS> requires <VK_OHOS_native_buffer>" ); +# endif + + int32_t nativeFenceFd; + Result result = static_cast<Result>( d.vkQueueSignalReleaseImageOHOS( + m_queue, waitSemaphores.size(), reinterpret_cast<const VkSemaphore *>( waitSemaphores.data() ), static_cast<VkImage>( image ), &nativeFenceFd ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::signalReleaseImageOHOS" ); + + return detail::createResultValueType( result, std::move( nativeFenceFd ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_OHOS*/ + + //=== VK_NV_cooperative_matrix2 === + + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV.html + template <typename Dispatch, typename 
std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV( + uint32_t * pPropertyCount, CooperativeMatrixFlexibleDimensionsPropertiesNV * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + static_cast<VkPhysicalDevice>( m_physicalDevice ), + pPropertyCount, + reinterpret_cast<VkCooperativeMatrixFlexibleDimensionsPropertiesNV *>( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV.html + template <typename CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator::value_type, + CooperativeMatrixFlexibleDimensionsPropertiesNV>::value, + int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::vector<CooperativeMatrixFlexibleDimensionsPropertiesNV, CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator>>::type + PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV && + "Function <vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV> requires <VK_NV_cooperative_matrix2>" ); +# endif + + std::vector<CooperativeMatrixFlexibleDimensionsPropertiesNV, CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator> properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixFlexibleDimensionsPropertiesNV *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } + + // wrapper function for command vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV.html + template <typename CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator::value_type, + 
CooperativeMatrixFlexibleDimensionsPropertiesNV>::value, + int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::vector<CooperativeMatrixFlexibleDimensionsPropertiesNV, CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator>>::type + PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV( + CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator & cooperativeMatrixFlexibleDimensionsPropertiesNVAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV && + "Function <vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV> requires <VK_NV_cooperative_matrix2>" ); +# endif + + std::vector<CooperativeMatrixFlexibleDimensionsPropertiesNV, CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator> properties( + cooperativeMatrixFlexibleDimensionsPropertiesNVAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixFlexibleDimensionsPropertiesNV *>( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + + // wrapper function for command vkGetMemoryMetalHandleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryMetalHandleEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryMetalHandleEXT( const MemoryGetMetalHandleInfoEXT * pGetMetalHandleInfo, + void ** pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetMemoryMetalHandleEXT( + static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetMetalHandleInfoEXT *>( pGetMetalHandleInfo ), pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryMetalHandleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryMetalHandleEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type + Device::getMemoryMetalHandleEXT( const MemoryGetMetalHandleInfoEXT & getMetalHandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + 
VULKAN_HPP_ASSERT( d.vkGetMemoryMetalHandleEXT && "Function <vkGetMemoryMetalHandleEXT> requires <VK_EXT_external_memory_metal>" ); +# endif + + void * handle; + Result result = + static_cast<Result>( d.vkGetMemoryMetalHandleEXT( m_device, reinterpret_cast<const VkMemoryGetMetalHandleInfoEXT *>( &getMetalHandleInfo ), &handle ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryMetalHandleEXT" ); + + return detail::createResultValueType( result, std::move( handle ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // wrapper function for command vkGetMemoryMetalHandlePropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryMetalHandlePropertiesEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryMetalHandlePropertiesEXT( ExternalMemoryHandleTypeFlagBits handleType, + const void * pHandle, + MemoryMetalHandlePropertiesEXT * pMemoryMetalHandleProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetMemoryMetalHandlePropertiesEXT( static_cast<VkDevice>( m_device ), + static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), + pHandle, + reinterpret_cast<VkMemoryMetalHandlePropertiesEXT *>( pMemoryMetalHandleProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkGetMemoryMetalHandlePropertiesEXT, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetMemoryMetalHandlePropertiesEXT.html + template <typename HandleType, typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<MemoryMetalHandlePropertiesEXT>::type + Device::getMemoryMetalHandlePropertiesEXT( ExternalMemoryHandleTypeFlagBits handleType, HandleType const & handle, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryMetalHandlePropertiesEXT && "Function <vkGetMemoryMetalHandlePropertiesEXT> requires <VK_EXT_external_memory_metal>" ); +# endif + + MemoryMetalHandlePropertiesEXT memoryMetalHandleProperties; + Result result = + static_cast<Result>( d.vkGetMemoryMetalHandlePropertiesEXT( m_device, + static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), + reinterpret_cast<const void *>( &handle ), + reinterpret_cast<VkMemoryMetalHandlePropertiesEXT *>( &memoryMetalHandleProperties ) ) ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryMetalHandlePropertiesEXT" ); + + return detail::createResultValueType( result, std::move( memoryMetalHandleProperties ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_ARM_performance_counters_by_region === + + // wrapper function for command vkEnumeratePhysicalDeviceQueueFamilyPerformanceCountersByRegionARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceQueueFamilyPerformanceCountersByRegionARM.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::enumerateQueueFamilyPerformanceCountersByRegionARM( uint32_t 
queueFamilyIndex, + uint32_t * pCounterCount, + PerformanceCounterARM * pCounters, + PerformanceCounterDescriptionARM * pCounterDescriptions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceCountersByRegionARM( static_cast<VkPhysicalDevice>( m_physicalDevice ), + queueFamilyIndex, + pCounterCount, + reinterpret_cast<VkPerformanceCounterARM *>( pCounters ), + reinterpret_cast<VkPerformanceCounterDescriptionARM *>( pCounterDescriptions ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkEnumeratePhysicalDeviceQueueFamilyPerformanceCountersByRegionARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceQueueFamilyPerformanceCountersByRegionARM.html + template <typename PerformanceCounterARMAllocator, + typename PerformanceCounterDescriptionARMAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PerformanceCounterARMAllocator::value_type, PerformanceCounterARM>::value && + std::is_same<typename PerformanceCounterDescriptionARMAllocator::value_type, PerformanceCounterDescriptionARM>::value, + int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::pair<std::vector<PerformanceCounterARM, PerformanceCounterARMAllocator>, + std::vector<PerformanceCounterDescriptionARM, PerformanceCounterDescriptionARMAllocator>>>::type + PhysicalDevice::enumerateQueueFamilyPerformanceCountersByRegionARM( uint32_t queueFamilyIndex, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceCountersByRegionARM && + "Function <vkEnumeratePhysicalDeviceQueueFamilyPerformanceCountersByRegionARM> requires <VK_ARM_performance_counters_by_region>" ); +# endif + + std::pair<std::vector<PerformanceCounterARM, PerformanceCounterARMAllocator>, + std::vector<PerformanceCounterDescriptionARM, PerformanceCounterDescriptionARMAllocator>> + data_; + std::vector<PerformanceCounterARM, PerformanceCounterARMAllocator> & counters = data_.first; + std::vector<PerformanceCounterDescriptionARM, PerformanceCounterDescriptionARMAllocator> & counterDescriptions = data_.second; + uint32_t counterCount; + Result result; + do + { + result = static_cast<Result>( + d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceCountersByRegionARM( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); + if ( ( result == Result::eSuccess ) && counterCount ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceCountersByRegionARM( + m_physicalDevice, + queueFamilyIndex, + &counterCount, + reinterpret_cast<VkPerformanceCounterARM *>( counters.data() ), + reinterpret_cast<VkPerformanceCounterDescriptionARM *>( counterDescriptions.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceCountersByRegionARM" ); + VULKAN_HPP_ASSERT( counterCount <= counters.size() ); + if ( counterCount < counters.size() ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + } + return 
detail::createResultValueType( result, std::move( data_ ) ); + } + + // wrapper function for command vkEnumeratePhysicalDeviceQueueFamilyPerformanceCountersByRegionARM, see + // https://registry.khronos.org/vulkan/specs/latest/man/html/vkEnumeratePhysicalDeviceQueueFamilyPerformanceCountersByRegionARM.html + template <typename PerformanceCounterARMAllocator, + typename PerformanceCounterDescriptionARMAllocator, + typename Dispatch, + typename std::enable_if<std::is_same<typename PerformanceCounterARMAllocator::value_type, PerformanceCounterARM>::value && + std::is_same<typename PerformanceCounterDescriptionARMAllocator::value_type, PerformanceCounterDescriptionARM>::value, + int>::type, + typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::pair<std::vector<PerformanceCounterARM, PerformanceCounterARMAllocator>, + std::vector<PerformanceCounterDescriptionARM, PerformanceCounterDescriptionARMAllocator>>>::type + PhysicalDevice::enumerateQueueFamilyPerformanceCountersByRegionARM( uint32_t queueFamilyIndex, + PerformanceCounterARMAllocator & performanceCounterARMAllocator, + PerformanceCounterDescriptionARMAllocator & performanceCounterDescriptionARMAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceCountersByRegionARM && + "Function <vkEnumeratePhysicalDeviceQueueFamilyPerformanceCountersByRegionARM> requires <VK_ARM_performance_counters_by_region>" ); +# endif + + std::pair<std::vector<PerformanceCounterARM, PerformanceCounterARMAllocator>, + std::vector<PerformanceCounterDescriptionARM, PerformanceCounterDescriptionARMAllocator>> + data_( + std::piecewise_construct, std::forward_as_tuple( performanceCounterARMAllocator ), std::forward_as_tuple( performanceCounterDescriptionARMAllocator ) ); + std::vector<PerformanceCounterARM, PerformanceCounterARMAllocator> & counters = data_.first; + std::vector<PerformanceCounterDescriptionARM, PerformanceCounterDescriptionARMAllocator> & counterDescriptions = data_.second; + uint32_t counterCount; + Result result; + do + { + result = static_cast<Result>( + d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceCountersByRegionARM( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); + if ( ( result == Result::eSuccess ) && counterCount ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceCountersByRegionARM( + m_physicalDevice, + queueFamilyIndex, + &counterCount, + reinterpret_cast<VkPerformanceCounterARM *>( counters.data() ), + reinterpret_cast<VkPerformanceCounterDescriptionARM *>( counterDescriptions.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceCountersByRegionARM" ); + VULKAN_HPP_ASSERT( counterCount <= counters.size() ); + if ( counterCount < counters.size() ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + } + return detail::createResultValueType( result, std::move( data_ ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_fragment_density_map_offset === + + // wrapper function for command vkCmdEndRendering2EXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRendering2EXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::endRendering2EXT( const RenderingEndInfoKHR * pRenderingEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndRendering2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingEndInfoKHR *>( pRenderingEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdEndRendering2EXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRendering2EXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::endRendering2EXT( Optional<const RenderingEndInfoKHR> renderingEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdEndRendering2EXT && "Function <vkCmdEndRendering2EXT> requires <VK_EXT_fragment_density_map_offset> or <VK_KHR_maintenance10>" ); +# endif + + d.vkCmdEndRendering2EXT( m_commandBuffer, reinterpret_cast<const VkRenderingEndInfoKHR *>( renderingEndInfo.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_custom_resolve === + + // wrapper function for command vkCmdBeginCustomResolveEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginCustomResolveEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::beginCustomResolveEXT( const BeginCustomResolveInfoEXT * pBeginCustomResolveInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginCustomResolveEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), + reinterpret_cast<const VkBeginCustomResolveInfoEXT *>( pBeginCustomResolveInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdBeginCustomResolveEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginCustomResolveEXT.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::beginCustomResolveEXT( Optional<const BeginCustomResolveInfoEXT> beginCustomResolveInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginCustomResolveEXT && "Function <vkCmdBeginCustomResolveEXT> requires <VK_EXT_custom_resolve>" ); +# endif + + d.vkCmdBeginCustomResolveEXT( m_commandBuffer, reinterpret_cast<const VkBeginCustomResolveInfoEXT *>( beginCustomResolveInfo.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_maintenance10 === + + // wrapper function for command vkCmdEndRendering2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRendering2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::endRendering2KHR( const RenderingEndInfoKHR * pRenderingEndInfo, 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndRendering2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingEndInfoKHR *>( pRenderingEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + // wrapper function for command vkCmdEndRendering2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRendering2KHR.html + template <typename Dispatch, typename std::enable_if<detail::isDispatchLoader<Dispatch>::value, bool>::type> + VULKAN_HPP_INLINE void CommandBuffer::endRendering2KHR( Optional<const RenderingEndInfoKHR> renderingEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdEndRendering2KHR && "Function <vkCmdEndRendering2KHR> requires <VK_EXT_fragment_density_map_offset> or <VK_KHR_maintenance10>" ); +# endif + + d.vkCmdEndRendering2KHR( m_commandBuffer, reinterpret_cast<const VkRenderingEndInfoKHR *>( renderingEndInfo.get() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +} // namespace VULKAN_HPP_NAMESPACE +#endif
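+
+// A minimal usage sketch for the VK_EXT_device_generated_commands wrappers defined above; it is kept as a
+// comment so the generated header stays unchanged for compilation. The device handle and the contents of the
+// create-info structure are assumptions for illustration only, and the call relies on the default allocator and
+// dispatcher arguments declared with the handle classes elsewhere in vulkan.hpp.
+//
+//   VULKAN_HPP_NAMESPACE::Device device = /* obtained from PhysicalDevice::createDevice */;
+//   VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT layoutCreateInfo{};  // assumed to be filled by the caller
+//   auto indirectCommandsLayout = device.createIndirectCommandsLayoutEXTUnique( layoutCreateInfo );
+//   // On success the UniqueHandle releases the layout via vkDestroyIndirectCommandsLayoutEXT when it leaves scope;
+//   // on failure the enhanced-mode wrapper reports the VkResult through detail::resultCheck as shown above.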
