diff --git a/Vulkan-Headers b/Vulkan-Headers index e12a8f8..d4c2217 160000 --- a/Vulkan-Headers +++ b/Vulkan-Headers @@ -1 +1 @@ -Subproject commit e12a8f8cde4047fb40c34bc1bf624e24c0d0c76e +Subproject commit d4c221772cb222117446521517254c91f9211801 diff --git a/vulkan/vulkan.hpp b/vulkan/vulkan.hpp index 1558726..d8d16e5 100644 --- a/vulkan/vulkan.hpp +++ b/vulkan/vulkan.hpp @@ -114,7 +114,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h # include #endif -static_assert( VK_HEADER_VERSION == 232, "Wrong VK_HEADER_VERSION!" ); +static_assert( VK_HEADER_VERSION == 233, "Wrong VK_HEADER_VERSION!" ); // 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default. // To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION @@ -5454,6 +5454,44 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetDescriptorSetHostMappingVALVE( device, descriptorSet, ppData ); } + //=== VK_NV_copy_memory_indirect === + + void vkCmdCopyMemoryIndirectNV( VkCommandBuffer commandBuffer, + VkDeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride ); + } + + void vkCmdCopyMemoryToImageIndirectNV( VkCommandBuffer commandBuffer, + VkDeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + VkImage dstImage, + VkImageLayout dstImageLayout, + const VkImageSubresourceLayers * pImageSubresources ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryToImageIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride, dstImage, dstImageLayout, pImageSubresources ); + } + + //=== VK_NV_memory_decompression === + + void vkCmdDecompressMemoryNV( VkCommandBuffer commandBuffer, + uint32_t decompressRegionCount, + const VkDecompressMemoryRegionNV * pDecompressMemoryRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDecompressMemoryNV( commandBuffer, decompressRegionCount, pDecompressMemoryRegions ); + } + + void vkCmdDecompressMemoryIndirectCountNV( VkCommandBuffer commandBuffer, + VkDeviceAddress indirectCommandsAddress, + VkDeviceAddress indirectCommandsCountAddress, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDecompressMemoryIndirectCountNV( commandBuffer, indirectCommandsAddress, indirectCommandsCountAddress, stride ); + } + //=== VK_EXT_extended_dynamic_state3 === void vkCmdSetTessellationDomainOriginEXT( VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT @@ -11607,6 +11645,58 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_NV_copy_memory_indirect === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_NV_memory_decompression === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_NV_linear_color_attachment === template <> struct StructExtends @@ -11935,6 +12025,32 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_NV_ray_tracing_invocation_reorder === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + 
}; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_EXT_mutable_descriptor_type === template <> struct StructExtends @@ -13070,6 +13186,14 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0; PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0; + //=== VK_NV_copy_memory_indirect === + PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV = 0; + PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0; + + //=== VK_NV_memory_decompression === + PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0; + PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0; + //=== VK_EXT_extended_dynamic_state3 === PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0; PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0; @@ -14329,6 +14453,15 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetHostMappingVALVE" ) ); + //=== VK_NV_copy_memory_indirect === + vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryIndirectNV" ) ); + vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToImageIndirectNV" ) ); + + //=== VK_NV_memory_decompression === + vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetInstanceProcAddr( instance, "vkCmdDecompressMemoryNV" ) ); + vkCmdDecompressMemoryIndirectCountNV = + PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDecompressMemoryIndirectCountNV" ) ); + //=== VK_EXT_extended_dynamic_state3 === vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetInstanceProcAddr( instance, "vkCmdSetTessellationDomainOriginEXT" ) ); vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClampEnableEXT" ) ); @@ -15245,6 +15378,14 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) ); + //=== VK_NV_copy_memory_indirect === + vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) ); + vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) ); + + //=== VK_NV_memory_decompression === + vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) ); + vkCmdDecompressMemoryIndirectCountNV = PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) ); + //=== VK_EXT_extended_dynamic_state3 === vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) ); vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) ); diff --git 
a/vulkan/vulkan_enums.hpp b/vulkan/vulkan_enums.hpp
index 3fa9c66..a56958e 100644
--- a/vulkan/vulkan_enums.hpp
+++ b/vulkan/vulkan_enums.hpp
@@ -821,6 +821,10 @@ namespace VULKAN_HPP_NAMESPACE
     ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM,
     ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM,
     eSubpassFragmentDensityMapOffsetEndInfoQCOM = VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM,
+    ePhysicalDeviceCopyMemoryIndirectFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV,
+    ePhysicalDeviceCopyMemoryIndirectPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV,
+    ePhysicalDeviceMemoryDecompressionFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV,
+    ePhysicalDeviceMemoryDecompressionPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV,
     ePhysicalDeviceLinearColorAttachmentFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV,
     ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT,
     ePhysicalDeviceImageProcessingFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM,
@@ -850,6 +854,8 @@ namespace VULKAN_HPP_NAMESPACE
     eTilePropertiesQCOM = VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM,
     ePhysicalDeviceAmigoProfilingFeaturesSEC = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC,
     eAmigoProfilingSubmitInfoSEC = VK_STRUCTURE_TYPE_AMIGO_PROFILING_SUBMIT_INFO_SEC,
+    ePhysicalDeviceRayTracingInvocationReorderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV,
+    ePhysicalDeviceRayTracingInvocationReorderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV,
     ePhysicalDeviceMutableDescriptorTypeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT,
     eMutableDescriptorTypeCreateInfoEXT = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT,
     ePhysicalDeviceShaderCoreBuiltinsFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM,
@@ -6200,6 +6206,22 @@ namespace VULKAN_HPP_NAMESPACE
     eFullyUnknownOpaque = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_OPAQUE_EXT
   };
 
+  //=== VK_NV_memory_decompression ===
+
+  enum class MemoryDecompressionMethodFlagBitsNV : VkMemoryDecompressionMethodFlagsNV
+  {
+    eGdeflate10 = VK_MEMORY_DECOMPRESSION_METHOD_GDEFLATE_1_0_BIT_NV
+  };
+
+  using MemoryDecompressionMethodFlagsNV = Flags<MemoryDecompressionMethodFlagBitsNV>;
+
+  template <>
+  struct FlagTraits<MemoryDecompressionMethodFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryDecompressionMethodFlagsNV allFlags = MemoryDecompressionMethodFlagBitsNV::eGdeflate10;
+  };
+
   //=== VK_EXT_subpass_merge_feedback ===
 
   enum class SubpassMergeStatusEXT
@@ -6356,6 +6378,14 @@ namespace VULKAN_HPP_NAMESPACE
     static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowExecuteFlagsNV allFlags = OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints;
   };
 
+  //=== VK_NV_ray_tracing_invocation_reorder ===
+
+  enum class RayTracingInvocationReorderModeNV
+  {
+    eNone = VK_RAY_TRACING_INVOCATION_REORDER_MODE_NONE_NV,
+    eReorder = VK_RAY_TRACING_INVOCATION_REORDER_MODE_REORDER_NV
+  };
+
   //=========================
   //=== Index Type Traits ===
   //=========================
diff --git
a/vulkan/vulkan_funcs.hpp b/vulkan/vulkan_funcs.hpp index 650fbc3..99c98aa 100644 --- a/vulkan/vulkan_funcs.hpp +++ b/vulkan/vulkan_funcs.hpp @@ -20363,6 +20363,94 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + //=== VK_NV_copy_memory_indirect === + + template + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyMemoryIndirectNV( m_commandBuffer, static_cast( copyBufferAddress ), copyCount, stride ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers * pImageSubresources, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer, + static_cast( copyBufferAddress ), + copyCount, + stride, + static_cast( dstImage ), + static_cast( dstImageLayout ), + reinterpret_cast( pImageSubresources ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t stride, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageSubresources, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer, + static_cast( copyBufferAddress ), + imageSubresources.size(), + stride, + static_cast( dstImage ), + static_cast( dstImageLayout ), + reinterpret_cast( imageSubresources.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_memory_decompression === + + template + VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryNV( uint32_t decompressRegionCount, + const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV * pDecompressMemoryRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDecompressMemoryNV( m_commandBuffer, decompressRegionCount, reinterpret_cast( pDecompressMemoryRegions ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & decompressMemoryRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdDecompressMemoryNV( + m_commandBuffer, decompressMemoryRegions.size(), reinterpret_cast( decompressMemoryRegions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDecompressMemoryIndirectCountNV( + m_commandBuffer, static_cast( indirectCommandsAddress ), 
static_cast( indirectCommandsCountAddress ), stride ); + } + //=== VK_EXT_extended_dynamic_state3 === template diff --git a/vulkan/vulkan_handles.hpp b/vulkan/vulkan_handles.hpp index b81194b..f83368a 100644 --- a/vulkan/vulkan_handles.hpp +++ b/vulkan/vulkan_handles.hpp @@ -1444,6 +1444,17 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; struct SubpassFragmentDensityMapOffsetEndInfoQCOM; + //=== VK_NV_copy_memory_indirect === + struct CopyMemoryIndirectCommandNV; + struct CopyMemoryToImageIndirectCommandNV; + struct PhysicalDeviceCopyMemoryIndirectFeaturesNV; + struct PhysicalDeviceCopyMemoryIndirectPropertiesNV; + + //=== VK_NV_memory_decompression === + struct DecompressMemoryRegionNV; + struct PhysicalDeviceMemoryDecompressionFeaturesNV; + struct PhysicalDeviceMemoryDecompressionPropertiesNV; + //=== VK_NV_linear_color_attachment === struct PhysicalDeviceLinearColorAttachmentFeaturesNV; @@ -1502,6 +1513,10 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceAmigoProfilingFeaturesSEC; struct AmigoProfilingSubmitInfoSEC; + //=== VK_NV_ray_tracing_invocation_reorder === + struct PhysicalDeviceRayTracingInvocationReorderPropertiesNV; + struct PhysicalDeviceRayTracingInvocationReorderFeaturesNV; + //=== VK_EXT_mutable_descriptor_type === struct PhysicalDeviceMutableDescriptorTypeFeaturesEXT; using PhysicalDeviceMutableDescriptorTypeFeaturesVALVE = PhysicalDeviceMutableDescriptorTypeFeaturesEXT; @@ -5178,6 +5193,50 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + //=== VK_NV_copy_memory_indirect === + + template + void copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers * pImageSubresources, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t stride, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageSubresources, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_NV_memory_decompression === + + template + void decompressMemoryNV( uint32_t decompressRegionCount, + const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV * pDecompressMemoryRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & decompressMemoryRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, + uint32_t stride, + 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_extended_dynamic_state3 === template diff --git a/vulkan/vulkan_hash.hpp b/vulkan/vulkan_hash.hpp index 1ac2e2e..021c545 100644 --- a/vulkan/vulkan_hash.hpp +++ b/vulkan/vulkan_hash.hpp @@ -2419,6 +2419,35 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV const & copyMemoryIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryIndirectCommandNV.srcAddress ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryIndirectCommandNV.dstAddress ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryIndirectCommandNV.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV const & copyMemoryToImageIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.srcAddress ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.bufferRowLength ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.bufferImageHeight ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.imageSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.imageOffset ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.imageExtent ); + return seed; + } + }; + template <> struct hash { @@ -2677,6 +2706,21 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV const & decompressMemoryRegionNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.srcAddress ); + VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.dstAddress ); + VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.compressedSize ); + VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.decompressedSize ); + VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.decompressionMethod ); + return seed; + } + }; + template <> struct hash { @@ -6757,6 +6801,34 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV const & physicalDeviceCopyMemoryIndirectFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectFeaturesNV.indirectCopy ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV const & physicalDeviceCopyMemoryIndirectPropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectPropertiesNV.supportedQueues ); + return seed; + } + }; + template <> struct hash { @@ -8171,6 +8243,35 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV const & 
physicalDeviceMemoryDecompressionFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionFeaturesNV.memoryDecompression ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV const & physicalDeviceMemoryDecompressionPropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionPropertiesNV.decompressionMethods ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionPropertiesNV.maxDecompressionIndirectCount ); + return seed; + } + }; + template <> struct hash { @@ -8973,6 +9074,35 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & physicalDeviceRayTracingInvocationReorderFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderFeaturesNV.rayTracingInvocationReorder ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & + physicalDeviceRayTracingInvocationReorderPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderPropertiesNV.rayTracingInvocationReorderReorderingHint ); + return seed; + } + }; + template <> struct hash { diff --git a/vulkan/vulkan_raii.hpp b/vulkan/vulkan_raii.hpp index bca0fc8..8e86b93 100644 --- a/vulkan/vulkan_raii.hpp +++ b/vulkan/vulkan_raii.hpp @@ -1494,6 +1494,10 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_clip_space_w_scaling === vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); + //=== VK_NV_copy_memory_indirect === + vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) ); + vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) ); + //=== VK_NV_device_diagnostic_checkpoints === vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); @@ -1518,6 +1522,11 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_fragment_shading_rate_enums === vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) ); + //=== 
VK_NV_memory_decompression === + vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) ); + vkCmdDecompressMemoryIndirectCountNV = + PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) ); + //=== VK_NV_mesh_shader === vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); @@ -2272,6 +2281,10 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_clip_space_w_scaling === PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; + //=== VK_NV_copy_memory_indirect === + PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV = 0; + PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0; + //=== VK_NV_device_diagnostic_checkpoints === PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; @@ -2297,6 +2310,10 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_fragment_shading_rate_enums === PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0; + //=== VK_NV_memory_decompression === + PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0; + PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0; + //=== VK_NV_mesh_shader === PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; @@ -5654,6 +5671,26 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT; + //=== VK_NV_copy_memory_indirect === + + void copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + void copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t stride, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageSubresources ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_memory_decompression === + + void decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & decompressMemoryRegions ) const + VULKAN_HPP_NOEXCEPT; + + void decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_extended_dynamic_state3 === void setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT; @@ -18534,6 +18571,64 @@ namespace VULKAN_HPP_NAMESPACE return pData; } + //=== VK_NV_copy_memory_indirect === + + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryIndirectNV && + "Function needs extension enabled!" 
); + + getDispatcher()->vkCmdCopyMemoryIndirectNV( + static_cast( m_commandBuffer ), static_cast( copyBufferAddress ), copyCount, stride ); + } + + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectNV( + VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t stride, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageSubresources ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToImageIndirectNV && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdCopyMemoryToImageIndirectNV( static_cast( m_commandBuffer ), + static_cast( copyBufferAddress ), + imageSubresources.size(), + stride, + static_cast( dstImage ), + static_cast( dstImageLayout ), + reinterpret_cast( imageSubresources.data() ) ); + } + + //=== VK_NV_memory_decompression === + + VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & decompressMemoryRegions ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecompressMemoryNV && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdDecompressMemoryNV( static_cast( m_commandBuffer ), + decompressMemoryRegions.size(), + reinterpret_cast( decompressMemoryRegions.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecompressMemoryIndirectCountNV && + "Function needs extension enabled!" ); + + getDispatcher()->vkCmdDecompressMemoryIndirectCountNV( static_cast( m_commandBuffer ), + static_cast( indirectCommandsAddress ), + static_cast( indirectCommandsCountAddress ), + stride ); + } + //=== VK_EXT_extended_dynamic_state3 === VULKAN_HPP_INLINE void diff --git a/vulkan/vulkan_static_assertions.hpp b/vulkan/vulkan_static_assertions.hpp index b6a1e9a..20d1bc8 100644 --- a/vulkan/vulkan_static_assertions.hpp +++ b/vulkan/vulkan_static_assertions.hpp @@ -5803,6 +5803,60 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "SubpassFragmentDensityMapOffsetEndInfoQCOM is not nothrow_move_constructible!" ); +//=== VK_NV_copy_memory_indirect === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV ) == sizeof( VkCopyMemoryIndirectCommandNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyMemoryIndirectCommandNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV ) == sizeof( VkCopyMemoryToImageIndirectCommandNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyMemoryToImageIndirectCommandNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV ) == sizeof( VkPhysicalDeviceCopyMemoryIndirectFeaturesNV ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCopyMemoryIndirectFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV ) == + sizeof( VkPhysicalDeviceCopyMemoryIndirectPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCopyMemoryIndirectPropertiesNV is not nothrow_move_constructible!" ); + +//=== VK_NV_memory_decompression === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV ) == sizeof( VkDecompressMemoryRegionNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DecompressMemoryRegionNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV ) == + sizeof( VkPhysicalDeviceMemoryDecompressionFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMemoryDecompressionFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV ) == + sizeof( VkPhysicalDeviceMemoryDecompressionPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMemoryDecompressionPropertiesNV is not nothrow_move_constructible!" ); + //=== VK_NV_linear_color_attachment === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV ) == @@ -6061,6 +6115,24 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "AmigoProfilingSubmitInfoSEC is not nothrow_move_constructible!" ); +//=== VK_NV_ray_tracing_invocation_reorder === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV ) == + sizeof( VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingInvocationReorderPropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV ) == + sizeof( VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingInvocationReorderFeaturesNV is not nothrow_move_constructible!" 
); + //=== VK_EXT_mutable_descriptor_type === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT ) == diff --git a/vulkan/vulkan_structs.hpp b/vulkan/vulkan_structs.hpp index dd411eb..9daee99 100644 --- a/vulkan/vulkan_structs.hpp +++ b/vulkan/vulkan_structs.hpp @@ -16543,6 +16543,102 @@ namespace VULKAN_HPP_NAMESPACE }; using CopyImageToBufferInfo2KHR = CopyImageToBufferInfo2; + struct CopyMemoryIndirectCommandNV + { + using NativeType = VkCopyMemoryIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyMemoryIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : srcAddress( srcAddress_ ) + , dstAddress( dstAddress_ ) + , size( size_ ) + { + } + + VULKAN_HPP_CONSTEXPR CopyMemoryIndirectCommandNV( CopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyMemoryIndirectCommandNV( VkCopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyMemoryIndirectCommandNV( *reinterpret_cast( &rhs ) ) + { + } + + CopyMemoryIndirectCommandNV & operator=( CopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyMemoryIndirectCommandNV & operator=( VkCopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandNV & setSrcAddress( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ ) VULKAN_HPP_NOEXCEPT + { + srcAddress = srcAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandNV & setDstAddress( VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ ) VULKAN_HPP_NOEXCEPT + { + dstAddress = dstAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandNV & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCopyMemoryIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMemoryIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcAddress, dstAddress, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyMemoryIndirectCommandNV const & ) const = default; +#else + bool operator==( CopyMemoryIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcAddress == rhs.srcAddress ) && ( dstAddress == rhs.dstAddress ) && ( size == rhs.size ); +# endif + } + + bool operator!=( CopyMemoryIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + struct CopyMemoryToAccelerationStructureInfoKHR { using NativeType = VkCopyMemoryToAccelerationStructureInfoKHR; @@ -16647,6 +16743,136 @@ namespace VULKAN_HPP_NAMESPACE using Type = 
CopyMemoryToAccelerationStructureInfoKHR; }; + struct CopyMemoryToImageIndirectCommandNV + { + using NativeType = VkCopyMemoryToImageIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyMemoryToImageIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ = {}, + uint32_t bufferRowLength_ = {}, + uint32_t bufferImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcAddress( srcAddress_ ) + , bufferRowLength( bufferRowLength_ ) + , bufferImageHeight( bufferImageHeight_ ) + , imageSubresource( imageSubresource_ ) + , imageOffset( imageOffset_ ) + , imageExtent( imageExtent_ ) + { + } + + VULKAN_HPP_CONSTEXPR CopyMemoryToImageIndirectCommandNV( CopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyMemoryToImageIndirectCommandNV( VkCopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyMemoryToImageIndirectCommandNV( *reinterpret_cast( &rhs ) ) + { + } + + CopyMemoryToImageIndirectCommandNV & operator=( CopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyMemoryToImageIndirectCommandNV & operator=( VkCopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setSrcAddress( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ ) VULKAN_HPP_NOEXCEPT + { + srcAddress = srcAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT + { + bufferRowLength = bufferRowLength_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT + { + bufferImageHeight = bufferImageHeight_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & + setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + { + imageSubresource = imageSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT + { + imageOffset = imageOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCopyMemoryToImageIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMemoryToImageIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcAddress, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyMemoryToImageIndirectCommandNV const & ) const = default; +#else + bool operator==( 
CopyMemoryToImageIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcAddress == rhs.srcAddress ) && ( bufferRowLength == rhs.bufferRowLength ) && ( bufferImageHeight == rhs.bufferImageHeight ) && + ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent ); +# endif + } + + bool operator!=( CopyMemoryToImageIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress = {}; + uint32_t bufferRowLength = {}; + uint32_t bufferImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; + }; + struct CopyMemoryToMicromapInfoEXT { using NativeType = VkCopyMemoryToMicromapInfoEXT; @@ -18975,6 +19201,126 @@ namespace VULKAN_HPP_NAMESPACE using Type = DebugUtilsObjectTagInfoEXT; }; + struct DecompressMemoryRegionNV + { + using NativeType = VkDecompressMemoryRegionNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DecompressMemoryRegionNV( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize compressedSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize decompressedSize_ = {}, + VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethod_ = {} ) VULKAN_HPP_NOEXCEPT + : srcAddress( srcAddress_ ) + , dstAddress( dstAddress_ ) + , compressedSize( compressedSize_ ) + , decompressedSize( decompressedSize_ ) + , decompressionMethod( decompressionMethod_ ) + { + } + + VULKAN_HPP_CONSTEXPR DecompressMemoryRegionNV( DecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DecompressMemoryRegionNV( VkDecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DecompressMemoryRegionNV( *reinterpret_cast( &rhs ) ) + { + } + + DecompressMemoryRegionNV & operator=( DecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DecompressMemoryRegionNV & operator=( VkDecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setSrcAddress( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ ) VULKAN_HPP_NOEXCEPT + { + srcAddress = srcAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setDstAddress( VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ ) VULKAN_HPP_NOEXCEPT + { + dstAddress = dstAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setCompressedSize( VULKAN_HPP_NAMESPACE::DeviceSize compressedSize_ ) VULKAN_HPP_NOEXCEPT + { + compressedSize = compressedSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setDecompressedSize( VULKAN_HPP_NAMESPACE::DeviceSize decompressedSize_ ) VULKAN_HPP_NOEXCEPT + { + decompressedSize = decompressedSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & + setDecompressionMethod( VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethod_ ) VULKAN_HPP_NOEXCEPT + { + decompressionMethod = decompressionMethod_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + 
operator VkDecompressMemoryRegionNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDecompressMemoryRegionNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcAddress, dstAddress, compressedSize, decompressedSize, decompressionMethod ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DecompressMemoryRegionNV const & ) const = default; +#else + bool operator==( DecompressMemoryRegionNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcAddress == rhs.srcAddress ) && ( dstAddress == rhs.dstAddress ) && ( compressedSize == rhs.compressedSize ) && + ( decompressedSize == rhs.decompressedSize ) && ( decompressionMethod == rhs.decompressionMethod ); +# endif + } + + bool operator!=( DecompressMemoryRegionNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize compressedSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize decompressedSize = {}; + VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethod = {}; + }; + struct DedicatedAllocationBufferCreateInfoNV { using NativeType = VkDedicatedAllocationBufferCreateInfoNV; @@ -50483,6 +50829,186 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceCooperativeMatrixPropertiesNV; }; + struct PhysicalDeviceCopyMemoryIndirectFeaturesNV + { + using NativeType = VkPhysicalDeviceCopyMemoryIndirectFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 indirectCopy_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , indirectCopy( indirectCopy_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectFeaturesNV( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCopyMemoryIndirectFeaturesNV( VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCopyMemoryIndirectFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCopyMemoryIndirectFeaturesNV & operator=( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCopyMemoryIndirectFeaturesNV & operator=( VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesNV & setIndirectCopy( VULKAN_HPP_NAMESPACE::Bool32 indirectCopy_ ) VULKAN_HPP_NOEXCEPT + { + indirectCopy = indirectCopy_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator 
VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, indirectCopy ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indirectCopy == rhs.indirectCopy ); +# endif + } + + bool operator!=( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 indirectCopy = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCopyMemoryIndirectFeaturesNV; + }; + + struct PhysicalDeviceCopyMemoryIndirectPropertiesNV + { + using NativeType = VkPhysicalDeviceCopyMemoryIndirectPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectPropertiesNV( VULKAN_HPP_NAMESPACE::QueueFlags supportedQueues_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , supportedQueues( supportedQueues_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectPropertiesNV( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCopyMemoryIndirectPropertiesNV( VkPhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCopyMemoryIndirectPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCopyMemoryIndirectPropertiesNV & operator=( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCopyMemoryIndirectPropertiesNV & operator=( VkPhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCopyMemoryIndirectPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCopyMemoryIndirectPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, supportedQueues ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return 
this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedQueues == rhs.supportedQueues ); +# endif + } + + bool operator!=( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueueFlags supportedQueues = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCopyMemoryIndirectPropertiesNV; + }; + struct PhysicalDeviceCornerSampledImageFeaturesNV { using NativeType = VkPhysicalDeviceCornerSampledImageFeaturesNV; @@ -60079,6 +60605,192 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceMemoryBudgetPropertiesEXT; }; + struct PhysicalDeviceMemoryDecompressionFeaturesNV + { + using NativeType = VkPhysicalDeviceMemoryDecompressionFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryDecompressionFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 memoryDecompression_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , memoryDecompression( memoryDecompression_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionFeaturesNV( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryDecompressionFeaturesNV( VkPhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryDecompressionFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMemoryDecompressionFeaturesNV & operator=( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMemoryDecompressionFeaturesNV & operator=( VkPhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryDecompressionFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryDecompressionFeaturesNV & + setMemoryDecompression( VULKAN_HPP_NAMESPACE::Bool32 memoryDecompression_ ) VULKAN_HPP_NOEXCEPT + { + memoryDecompression = memoryDecompression_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceMemoryDecompressionFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryDecompressionFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryDecompression ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryDecompressionFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + 
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryDecompression == rhs.memoryDecompression ); +# endif + } + + bool operator!=( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryDecompressionFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 memoryDecompression = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryDecompressionFeaturesNV; + }; + + struct PhysicalDeviceMemoryDecompressionPropertiesNV + { + using NativeType = VkPhysicalDeviceMemoryDecompressionPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryDecompressionPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionPropertiesNV( VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethods_ = {}, + uint64_t maxDecompressionIndirectCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , decompressionMethods( decompressionMethods_ ) + , maxDecompressionIndirectCount( maxDecompressionIndirectCount_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMemoryDecompressionPropertiesNV( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryDecompressionPropertiesNV( VkPhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryDecompressionPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMemoryDecompressionPropertiesNV & operator=( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMemoryDecompressionPropertiesNV & operator=( VkPhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMemoryDecompressionPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryDecompressionPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, decompressionMethods, maxDecompressionIndirectCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryDecompressionPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decompressionMethods == rhs.decompressionMethods ) && + ( maxDecompressionIndirectCount == rhs.maxDecompressionIndirectCount ); +# endif + } + + bool operator!=( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryDecompressionPropertiesNV; + void * pNext = {}; + 
VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethods = {}; + uint64_t maxDecompressionIndirectCount = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryDecompressionPropertiesNV; + }; + struct PhysicalDeviceMemoryPriorityFeaturesEXT { using NativeType = VkPhysicalDeviceMemoryPriorityFeaturesEXT; @@ -65340,6 +66052,194 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceRayQueryFeaturesKHR; }; + struct PhysicalDeviceRayTracingInvocationReorderFeaturesNV + { + using NativeType = VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 rayTracingInvocationReorder_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , rayTracingInvocationReorder( rayTracingInvocationReorder_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingInvocationReorderFeaturesNV( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingInvocationReorderFeaturesNV( VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingInvocationReorderFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingInvocationReorderFeaturesNV & + operator=( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingInvocationReorderFeaturesNV & operator=( VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesNV & + setRayTracingInvocationReorder( VULKAN_HPP_NAMESPACE::Bool32 rayTracingInvocationReorder_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingInvocationReorder = rayTracingInvocationReorder_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayTracingInvocationReorder ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingInvocationReorder == rhs.rayTracingInvocationReorder ); +# endif + } + + bool operator!=( 
PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingInvocationReorder = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingInvocationReorderFeaturesNV; + }; + + struct PhysicalDeviceRayTracingInvocationReorderPropertiesNV + { + using NativeType = VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderPropertiesNV( + VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV rayTracingInvocationReorderReorderingHint_ = + VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV::eNone, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , rayTracingInvocationReorderReorderingHint( rayTracingInvocationReorderReorderingHint_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingInvocationReorderPropertiesNV( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingInvocationReorderPropertiesNV( VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingInvocationReorderPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingInvocationReorderPropertiesNV & + operator=( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingInvocationReorderPropertiesNV & operator=( VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayTracingInvocationReorderReorderingHint ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingInvocationReorderReorderingHint == rhs.rayTracingInvocationReorderReorderingHint ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV; + void * pNext = {}; + 
VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV rayTracingInvocationReorderReorderingHint =
+      VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV::eNone;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV>
+  {
+    using Type = PhysicalDeviceRayTracingInvocationReorderPropertiesNV;
+  };
+
   struct PhysicalDeviceRayTracingMaintenance1FeaturesKHR
   {
     using NativeType = VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR;
diff --git a/vulkan/vulkan_to_string.hpp b/vulkan/vulkan_to_string.hpp
index 2bcc89d..f3862a2 100644
--- a/vulkan/vulkan_to_string.hpp
+++ b/vulkan/vulkan_to_string.hpp
@@ -3087,6 +3087,20 @@ namespace VULKAN_HPP_NAMESPACE
     return "{ " + result.substr( 0, result.size() - 3 ) + " }";
   }
 
+  //=== VK_NV_memory_decompression ===
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryDecompressionMethodFlagsNV value )
+  {
+    if ( !value )
+      return "{}";
+
+    std::string result;
+    if ( value & MemoryDecompressionMethodFlagBitsNV::eGdeflate10 )
+      result += "Gdeflate10 | ";
+
+    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
+  }
+
   //=== VK_NV_optical_flow ===
 
   VULKAN_HPP_INLINE std::string to_string( OpticalFlowUsageFlagsNV value )
@@ -3974,6 +3988,10 @@ namespace VULKAN_HPP_NAMESPACE
       case StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM: return "PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM";
       case StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM: return "PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM";
       case StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM: return "SubpassFragmentDensityMapOffsetEndInfoQCOM";
+      case StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV: return "PhysicalDeviceCopyMemoryIndirectFeaturesNV";
+      case StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesNV: return "PhysicalDeviceCopyMemoryIndirectPropertiesNV";
+      case StructureType::ePhysicalDeviceMemoryDecompressionFeaturesNV: return "PhysicalDeviceMemoryDecompressionFeaturesNV";
+      case StructureType::ePhysicalDeviceMemoryDecompressionPropertiesNV: return "PhysicalDeviceMemoryDecompressionPropertiesNV";
       case StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV: return "PhysicalDeviceLinearColorAttachmentFeaturesNV";
       case StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT: return "PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT";
       case StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM: return "PhysicalDeviceImageProcessingFeaturesQCOM";
@@ -4003,6 +4021,8 @@ namespace VULKAN_HPP_NAMESPACE
       case StructureType::eTilePropertiesQCOM: return "TilePropertiesQCOM";
       case StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC: return "PhysicalDeviceAmigoProfilingFeaturesSEC";
       case StructureType::eAmigoProfilingSubmitInfoSEC: return "AmigoProfilingSubmitInfoSEC";
+      case StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV: return "PhysicalDeviceRayTracingInvocationReorderFeaturesNV";
+      case StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV: return "PhysicalDeviceRayTracingInvocationReorderPropertiesNV";
       case StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT: return "PhysicalDeviceMutableDescriptorTypeFeaturesEXT";
       case StructureType::eMutableDescriptorTypeCreateInfoEXT: return "MutableDescriptorTypeCreateInfoEXT";
       case StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM: return "PhysicalDeviceShaderCoreBuiltinsFeaturesARM";
@@ -8048,6 +8068,17 @@ namespace VULKAN_HPP_NAMESPACE
     }
   }
 
+  //=== VK_NV_memory_decompression ===
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryDecompressionMethodFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case MemoryDecompressionMethodFlagBitsNV::eGdeflate10: return "Gdeflate10";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
   //=== VK_EXT_subpass_merge_feedback ===
 
   VULKAN_HPP_INLINE std::string to_string( SubpassMergeStatusEXT value )
@@ -8173,5 +8204,17 @@ namespace VULKAN_HPP_NAMESPACE
     }
   }
 
+  //=== VK_NV_ray_tracing_invocation_reorder ===
+
+  VULKAN_HPP_INLINE std::string to_string( RayTracingInvocationReorderModeNV value )
+  {
+    switch ( value )
+    {
+      case RayTracingInvocationReorderModeNV::eNone: return "None";
+      case RayTracingInvocationReorderModeNV::eReorder: return "Reorder";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
 }  // namespace VULKAN_HPP_NAMESPACE
 #endif
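// ---------------------------------------------------------------------------
// Usage sketch (editor-added, not part of the patch above): one way the
// feature/property structures introduced by this change might be queried
// through vulkan.hpp's StructureChain helpers. The vk::PhysicalDevice object,
// the default dispatcher, and driver support for VK_NV_memory_decompression
// and VK_NV_ray_tracing_invocation_reorder are assumptions, not taken from
// the diff; the function name queryNvExtensionSupport is hypothetical.
// ---------------------------------------------------------------------------
#include <vulkan/vulkan.hpp>
#include <vulkan/vulkan_to_string.hpp>

#include <iostream>

void queryNvExtensionSupport( vk::PhysicalDevice physicalDevice )
{
  // Chain the new feature structs behind PhysicalDeviceFeatures2; the driver
  // fills in every struct in the chain that it recognizes.
  auto features = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                              vk::PhysicalDeviceMemoryDecompressionFeaturesNV,
                                              vk::PhysicalDeviceRayTracingInvocationReorderFeaturesNV>();

  if ( features.get<vk::PhysicalDeviceMemoryDecompressionFeaturesNV>().memoryDecompression )
  {
    // The supported decompression methods and the indirect-count limit live in
    // the corresponding properties struct.
    auto properties = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
                                                    vk::PhysicalDeviceMemoryDecompressionPropertiesNV>();
    auto const & decompression = properties.get<vk::PhysicalDeviceMemoryDecompressionPropertiesNV>();
    std::cout << "decompression methods : " << vk::to_string( decompression.decompressionMethods ) << "\n"
              << "max indirect count    : " << decompression.maxDecompressionIndirectCount << "\n";
  }

  auto const & reorder = features.get<vk::PhysicalDeviceRayTracingInvocationReorderFeaturesNV>();
  std::cout << "ray tracing invocation reorder: " << ( reorder.rayTracingInvocationReorder ? "supported" : "not supported" ) << "\n";
}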