From 29723f90a127ff08d9099855378162f04b4ffddd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20S=C3=BC=C3=9Fenbach?= Date: Mon, 19 Feb 2024 22:00:57 +0100 Subject: [PATCH] Update Vulkan-Headers to v1.3.278 (#1810) --- Vulkan-Headers | 2 +- VulkanHppGenerator.cpp | 38 +- vulkan/vulkan.cppm | 17 +- vulkan/vulkan.hpp | 75 +- vulkan/vulkan_enums.hpp | 13 +- vulkan/vulkan_extension_inspection.hpp | 28 +- vulkan/vulkan_funcs.hpp | 89 +- vulkan/vulkan_handles.hpp | 70 +- vulkan/vulkan_hash.hpp | 57 + vulkan/vulkan_raii.hpp | 62 +- vulkan/vulkan_static_assertions.hpp | 32 + vulkan/vulkan_structs.hpp | 1537 +++++++++++++++++++----- vulkan/vulkan_to_string.hpp | 44 +- vulkan/vulkansc_structs.hpp | 665 ++++++++-- 14 files changed, 2249 insertions(+), 480 deletions(-) diff --git a/Vulkan-Headers b/Vulkan-Headers index 5a5c9a6..31aa7f6 160000 --- a/Vulkan-Headers +++ b/Vulkan-Headers @@ -1 +1 @@ -Subproject commit 5a5c9a643484d888873e32c5d7d484fae8e71d3d +Subproject commit 31aa7f634b052d87ede4664053e85f3f4d1d50d3 diff --git a/VulkanHppGenerator.cpp b/VulkanHppGenerator.cpp index a638102..c81a2a8 100644 --- a/VulkanHppGenerator.cpp +++ b/VulkanHppGenerator.cpp @@ -10720,15 +10720,35 @@ std::string VulkanHppGenerator::generateStructConstructorsEnhanced( std::pairarraySizes.size() == 1 ); - static const std::string copyOpsTemplate = R"( + if ( mit->lenExpressions[0] == "null-terminated" ) + { + static const std::string strcpyTemplate = R"( VULKAN_HPP_ASSERT( ${memberName}_.size() < ${arraySize} ); - ${copyOp}( ${memberName}, ${memberSize}, ${memberName}_.data(), ${memberName}_.size() );)"; +#if defined( WIN32 ) + strncpy_s( ${memberName}, ${memberSize}, ${memberName}_.data(), ${memberName}_.size() ); +#else + strncpy( ${memberName}, ${memberName}_.data(), std::min( ${memberSize}, ${memberName}_.size() ) ); +#endif +)"; - copyOps += replaceWithMap( copyOpsTemplate, - { { "arraySize", mit->arraySizes[0] }, - { "copyOp", mit->lenExpressions[0] == "null-terminated" ? "strncpy_s" : "memcpy_s" }, - { "memberName", mit->name }, - { "memberSize", mit->arraySizes[0] } } ); + copyOps += + replaceWithMap( strcpyTemplate, { { "arraySize", mit->arraySizes[0] }, { "memberName", mit->name }, { "memberSize", mit->arraySizes[0] } } ); + } + else + { + static const std::string memcpyTemplate = R"( + VULKAN_HPP_ASSERT( ${memberName}_.size() < ${arraySize} ); + memcpy( ${memberName}, ${memberName}_.data(), ${lenExpression} * sizeof( ${argumentType} ) );)"; + + std::string arraySizeExpression = ( mit->lenExpressions[0] == "null-terminated" ) + ? 
( "std::min( " + mit->name + "_.size(), " + mit->arraySizes[0] + " )" ) + : ( mit->lenExpressions[0] + " * sizeof( " + argumentType + " )" ); + copyOps += replaceWithMap( memcpyTemplate, + { { "arraySize", mit->arraySizes[0] }, + { "argumentType", argumentType }, + { "lenExpression", mit->lenExpressions[0] }, + { "memberName", mit->name } } ); + } } } else @@ -11447,7 +11467,11 @@ std::string VulkanHppGenerator::generateStructSetter( std::string const & struct ${structureName} & set${ArrayName}( std::string const & ${arrayName}_ ) VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( ${arrayName}_.size() < ${arraySize} ); +#if defined( WIN32 ) strncpy_s( ${arrayName}, ${arraySize}, ${arrayName}_.data(), ${arrayName}_.size() ); +#else + strncpy( ${arrayName}, ${arrayName}_.data(), std::min( ${arraySize}, ${arrayName}_.size() ) ); +#endif return *this; } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ diff --git a/vulkan/vulkan.cppm b/vulkan/vulkan.cppm index c24ddce..a408a68 100644 --- a/vulkan/vulkan.cppm +++ b/vulkan/vulkan.cppm @@ -905,7 +905,6 @@ export namespace VULKAN_HPP_NAMESPACE # endif /*VK_USE_PLATFORM_WIN32_KHR*/ using VULKAN_HPP_NAMESPACE::CompressionExhaustedEXTError; - using VULKAN_HPP_NAMESPACE::IncompatibleShaderBinaryEXTError; using VULKAN_HPP_NAMESPACE::InvalidVideoStdParametersKHRError; #endif /*VULKAN_HPP_NO_EXCEPTIONS*/ @@ -1873,6 +1872,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::KHRMapMemory2ExtensionName; using VULKAN_HPP_NAMESPACE::KHRMapMemory2SpecVersion; + //=== VK_EXT_map_memory_placed === + using VULKAN_HPP_NAMESPACE::EXTMapMemoryPlacedExtensionName; + using VULKAN_HPP_NAMESPACE::EXTMapMemoryPlacedSpecVersion; + //=== VK_EXT_shader_atomic_float2 === using VULKAN_HPP_NAMESPACE::EXTShaderAtomicFloat2ExtensionName; using VULKAN_HPP_NAMESPACE::EXTShaderAtomicFloat2SpecVersion; @@ -2491,6 +2494,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::NVDescriptorPoolOverallocationExtensionName; using VULKAN_HPP_NAMESPACE::NVDescriptorPoolOverallocationSpecVersion; + //=== VK_NV_shader_atomic_float16_vector === + using VULKAN_HPP_NAMESPACE::NVShaderAtomicFloat16VectorExtensionName; + using VULKAN_HPP_NAMESPACE::NVShaderAtomicFloat16VectorSpecVersion; + //======================== //=== CONSTEXPR VALUEs === //======================== @@ -3671,6 +3678,11 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR; using VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR; + //=== VK_EXT_map_memory_placed === + using VULKAN_HPP_NAMESPACE::MemoryMapPlacedInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedPropertiesEXT; + //=== VK_EXT_shader_atomic_float2 === using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT; @@ -4323,6 +4335,9 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_NV_descriptor_pool_overallocation === using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV; + //=== VK_NV_shader_atomic_float16_vector === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV; + //=============== //=== HANDLEs === //=============== diff --git a/vulkan/vulkan.hpp b/vulkan/vulkan.hpp index 2c7f810..7455ac1 100644 --- a/vulkan/vulkan.hpp +++ b/vulkan/vulkan.hpp @@ -56,7 +56,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h # include #endif -static_assert( VK_HEADER_VERSION == 277, "Wrong VK_HEADER_VERSION!" 
); +static_assert( VK_HEADER_VERSION == 278, "Wrong VK_HEADER_VERSION!" ); // includes through some other header // this results in major(x) being resolved to gnu_dev_major(x) @@ -6549,14 +6549,6 @@ namespace VULKAN_HPP_NAMESPACE CompressionExhaustedEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorCompressionExhaustedEXT ), message ) {} }; - class IncompatibleShaderBinaryEXTError : public SystemError - { - public: - IncompatibleShaderBinaryEXTError( std::string const & message ) : SystemError( make_error_code( Result::eErrorIncompatibleShaderBinaryEXT ), message ) {} - - IncompatibleShaderBinaryEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorIncompatibleShaderBinaryEXT ), message ) {} - }; - namespace detail { [[noreturn]] VULKAN_HPP_INLINE void throwResultException( Result result, char const * message ) @@ -6599,7 +6591,6 @@ namespace VULKAN_HPP_NAMESPACE # endif /*VK_USE_PLATFORM_WIN32_KHR*/ case Result::eErrorInvalidVideoStdParametersKHR: throw InvalidVideoStdParametersKHRError( message ); case Result::eErrorCompressionExhaustedEXT: throw CompressionExhaustedEXTError( message ); - case Result::eErrorIncompatibleShaderBinaryEXT: throw IncompatibleShaderBinaryEXTError( message ); default: throw SystemError( make_error_code( result ), message ); } } @@ -8015,6 +8006,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto KHRMapMemory2ExtensionName = VK_KHR_MAP_MEMORY_2_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto KHRMapMemory2SpecVersion = VK_KHR_MAP_MEMORY_2_SPEC_VERSION; + //=== VK_EXT_map_memory_placed === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMapMemoryPlacedExtensionName = VK_EXT_MAP_MEMORY_PLACED_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMapMemoryPlacedSpecVersion = VK_EXT_MAP_MEMORY_PLACED_SPEC_VERSION; + //=== VK_EXT_shader_atomic_float2 === VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderAtomicFloat2ExtensionName = VK_EXT_SHADER_ATOMIC_FLOAT_2_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderAtomicFloat2SpecVersion = VK_EXT_SHADER_ATOMIC_FLOAT_2_SPEC_VERSION; @@ -8667,6 +8662,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto NVDescriptorPoolOverallocationExtensionName = VK_NV_DESCRIPTOR_POOL_OVERALLOCATION_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto NVDescriptorPoolOverallocationSpecVersion = VK_NV_DESCRIPTOR_POOL_OVERALLOCATION_SPEC_VERSION; + //=== VK_NV_shader_atomic_float16_vector === + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderAtomicFloat16VectorExtensionName = VK_NV_SHADER_ATOMIC_FLOAT16_VECTOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderAtomicFloat16VectorSpecVersion = VK_NV_SHADER_ATOMIC_FLOAT16_VECTOR_SPEC_VERSION; + } // namespace VULKAN_HPP_NAMESPACE // clang-format off @@ -12833,6 +12832,43 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_EXT_map_memory_placed === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_EXT_shader_atomic_float2 === template <> struct StructExtends @@ -16552,6 +16588,25 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_NV_shader_atomic_float16_vector === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + #endif // 
VULKAN_HPP_DISABLE_ENHANCED_MODE #if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL diff --git a/vulkan/vulkan_enums.hpp b/vulkan/vulkan_enums.hpp index 547553a..2ddc619 100644 --- a/vulkan/vulkan_enums.hpp +++ b/vulkan/vulkan_enums.hpp @@ -285,6 +285,7 @@ namespace VULKAN_HPP_NAMESPACE eErrorPipelineCompileRequiredEXT = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT, eErrorInvalidVideoStdParametersKHR = VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR, eErrorCompressionExhaustedEXT = VK_ERROR_COMPRESSION_EXHAUSTED_EXT, + eIncompatibleShaderBinaryEXT = VK_INCOMPATIBLE_SHADER_BINARY_EXT, eErrorIncompatibleShaderBinaryEXT = VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT }; @@ -1027,6 +1028,9 @@ namespace VULKAN_HPP_NAMESPACE eHostImageCopyDevicePerformanceQueryEXT = VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT, eMemoryMapInfoKHR = VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR, eMemoryUnmapInfoKHR = VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR, + ePhysicalDeviceMapMemoryPlacedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_FEATURES_EXT, + ePhysicalDeviceMapMemoryPlacedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_PROPERTIES_EXT, + eMemoryMapPlacedInfoEXT = VK_STRUCTURE_TYPE_MEMORY_MAP_PLACED_INFO_EXT, ePhysicalDeviceShaderAtomicFloat2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT, eSurfacePresentModeEXT = VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT, eSurfacePresentScalingCapabilitiesEXT = VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT, @@ -1417,7 +1421,8 @@ namespace VULKAN_HPP_NAMESPACE ePushDescriptorSetWithTemplateInfoKHR = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO_KHR, eSetDescriptorBufferOffsetsInfoEXT = VK_STRUCTURE_TYPE_SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT, eBindDescriptorBufferEmbeddedSamplersInfoEXT = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT, - ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV + ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV, + ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV }; enum class PipelineCacheHeaderVersion @@ -2185,6 +2190,7 @@ namespace VULKAN_HPP_NAMESPACE enum class MemoryMapFlagBits : VkMemoryMapFlags { + ePlacedEXT = VK_MEMORY_MAP_PLACED_BIT_EXT }; using MemoryMapFlags = Flags; @@ -2193,7 +2199,7 @@ namespace VULKAN_HPP_NAMESPACE struct FlagTraits { static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryMapFlags allFlags = {}; + static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryMapFlags allFlags = MemoryMapFlagBits::ePlacedEXT; }; enum class ImageAspectFlagBits : VkImageAspectFlags @@ -6078,6 +6084,7 @@ namespace VULKAN_HPP_NAMESPACE enum class MemoryUnmapFlagBitsKHR : VkMemoryUnmapFlagsKHR { + eReserveEXT = VK_MEMORY_UNMAP_RESERVE_BIT_EXT }; using MemoryUnmapFlagsKHR = Flags; @@ -6086,7 +6093,7 @@ namespace VULKAN_HPP_NAMESPACE struct FlagTraits { static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryUnmapFlagsKHR allFlags = {}; + static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryUnmapFlagsKHR allFlags = MemoryUnmapFlagBitsKHR::eReserveEXT; }; //=== VK_EXT_surface_maintenance1 === diff --git a/vulkan/vulkan_extension_inspection.hpp b/vulkan/vulkan_extension_inspection.hpp index 75958da..6c5c6ea 100644 --- 
a/vulkan/vulkan_extension_inspection.hpp +++ b/vulkan/vulkan_extension_inspection.hpp @@ -279,6 +279,7 @@ namespace VULKAN_HPP_NAMESPACE "VK_KHR_pipeline_executable_properties", "VK_EXT_host_image_copy", "VK_KHR_map_memory2", + "VK_EXT_map_memory_placed", "VK_EXT_shader_atomic_float2", "VK_EXT_swapchain_maintenance1", "VK_EXT_shader_demote_to_helper_invocation", @@ -432,7 +433,8 @@ namespace VULKAN_HPP_NAMESPACE "VK_KHR_calibrated_timestamps", "VK_KHR_shader_expect_assume", "VK_KHR_maintenance6", - "VK_NV_descriptor_pool_overallocation" + "VK_NV_descriptor_pool_overallocation", + "VK_NV_shader_atomic_float16_vector" }; return deviceExtensions; } @@ -1398,6 +1400,11 @@ namespace VULKAN_HPP_NAMESPACE "VK_KHR_copy_commands2", "VK_KHR_format_feature_flags2", } } } } }, + { "VK_EXT_map_memory_placed", + { { "VK_VERSION_1_0", + { { + "VK_KHR_map_memory2", + } } } } }, { "VK_EXT_shader_atomic_float2", { { "VK_VERSION_1_0", { { @@ -2786,14 +2793,15 @@ namespace VULKAN_HPP_NAMESPACE || ( extension == "VK_KHR_buffer_device_address" ) || ( extension == "VK_EXT_line_rasterization" ) || ( extension == "VK_EXT_shader_atomic_float" ) || ( extension == "VK_EXT_host_query_reset" ) || ( extension == "VK_EXT_index_type_uint8" ) || ( extension == "VK_EXT_extended_dynamic_state" ) || ( extension == "VK_KHR_deferred_host_operations" ) || ( extension == "VK_KHR_pipeline_executable_properties" ) || - ( extension == "VK_EXT_host_image_copy" ) || ( extension == "VK_KHR_map_memory2" ) || ( extension == "VK_EXT_shader_atomic_float2" ) || - ( extension == "VK_EXT_swapchain_maintenance1" ) || ( extension == "VK_EXT_shader_demote_to_helper_invocation" ) || - ( extension == "VK_NV_device_generated_commands" ) || ( extension == "VK_NV_inherited_viewport_scissor" ) || - ( extension == "VK_KHR_shader_integer_dot_product" ) || ( extension == "VK_EXT_texel_buffer_alignment" ) || - ( extension == "VK_QCOM_render_pass_transform" ) || ( extension == "VK_EXT_depth_bias_control" ) || ( extension == "VK_EXT_device_memory_report" ) || - ( extension == "VK_EXT_robustness2" ) || ( extension == "VK_EXT_custom_border_color" ) || ( extension == "VK_GOOGLE_user_type" ) || - ( extension == "VK_KHR_pipeline_library" ) || ( extension == "VK_NV_present_barrier" ) || ( extension == "VK_KHR_shader_non_semantic_info" ) || - ( extension == "VK_KHR_present_id" ) || ( extension == "VK_EXT_private_data" ) || ( extension == "VK_EXT_pipeline_creation_cache_control" ) || + ( extension == "VK_EXT_host_image_copy" ) || ( extension == "VK_KHR_map_memory2" ) || ( extension == "VK_EXT_map_memory_placed" ) || + ( extension == "VK_EXT_shader_atomic_float2" ) || ( extension == "VK_EXT_swapchain_maintenance1" ) || + ( extension == "VK_EXT_shader_demote_to_helper_invocation" ) || ( extension == "VK_NV_device_generated_commands" ) || + ( extension == "VK_NV_inherited_viewport_scissor" ) || ( extension == "VK_KHR_shader_integer_dot_product" ) || + ( extension == "VK_EXT_texel_buffer_alignment" ) || ( extension == "VK_QCOM_render_pass_transform" ) || + ( extension == "VK_EXT_depth_bias_control" ) || ( extension == "VK_EXT_device_memory_report" ) || ( extension == "VK_EXT_robustness2" ) || + ( extension == "VK_EXT_custom_border_color" ) || ( extension == "VK_GOOGLE_user_type" ) || ( extension == "VK_KHR_pipeline_library" ) || + ( extension == "VK_NV_present_barrier" ) || ( extension == "VK_KHR_shader_non_semantic_info" ) || ( extension == "VK_KHR_present_id" ) || + ( extension == "VK_EXT_private_data" ) || ( extension == "VK_EXT_pipeline_creation_cache_control" ) || 
( extension == "VK_KHR_video_encode_queue" ) || ( extension == "VK_NV_device_diagnostics_config" ) || ( extension == "VK_QCOM_render_pass_store_ops" ) #if defined( VK_ENABLE_BETA_EXTENSIONS ) @@ -2866,7 +2874,7 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ || ( extension == "VK_MSFT_layered_driver" ) || ( extension == "VK_KHR_index_type_uint8" ) || ( extension == "VK_KHR_line_rasterization" ) || ( extension == "VK_KHR_calibrated_timestamps" ) || ( extension == "VK_KHR_shader_expect_assume" ) || ( extension == "VK_KHR_maintenance6" ) || - ( extension == "VK_NV_descriptor_pool_overallocation" ); + ( extension == "VK_NV_descriptor_pool_overallocation" ) || ( extension == "VK_NV_shader_atomic_float16_vector" ); } VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isInstanceExtension( std::string const & extension ) diff --git a/vulkan/vulkan_funcs.hpp b/vulkan/vulkan_funcs.hpp index e1995ab..0a09ecc 100644 --- a/vulkan/vulkan_funcs.hpp +++ b/vulkan/vulkan_funcs.hpp @@ -20667,8 +20667,8 @@ namespace VULKAN_HPP_NAMESPACE #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE Result Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR * pMemoryUnmapInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR * pMemoryUnmapInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkUnmapMemory2KHR( m_device, reinterpret_cast( pMemoryUnmapInfo ) ) ); @@ -20676,15 +20676,19 @@ namespace VULKAN_HPP_NAMESPACE #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE typename ResultValueType::type Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) VULKAN_HPP_ASSERT( d.vkUnmapMemory2KHR && "Function requires " ); # endif - d.vkUnmapMemory2KHR( m_device, reinterpret_cast( &memoryUnmapInfo ) ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkUnmapMemory2KHR( m_device, reinterpret_cast( &memoryUnmapInfo ) ) ); + resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2KHR" ); + + return createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -23210,8 +23214,8 @@ namespace VULKAN_HPP_NAMESPACE #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue - Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) @@ -23222,11 +23226,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( m_device, static_cast( renderpass ), reinterpret_cast( &maxWorkgroupSize ) ) ); - resultCheck( result, - 
VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } ); + resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI" ); - return ResultValue( static_cast( result ), std::move( maxWorkgroupSize ) ); + return createResultValueType( result, std::move( maxWorkgroupSize ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -25388,8 +25390,8 @@ namespace VULKAN_HPP_NAMESPACE #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, Optional allocator, Dispatch const & d ) const { @@ -25405,16 +25407,19 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( shaders.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT" ); + resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); - return createResultValueType( result, std::move( shaders ) ); + return ResultValue>( static_cast( result ), + std::move( shaders ) ); } template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, Optional allocator, ShaderEXTAllocator & shaderEXTAllocator, Dispatch const & d ) const @@ -25431,14 +25436,17 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( shaders.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT" ); + resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); - return createResultValueType( result, std::move( shaders ) ); + return ResultValue>( static_cast( result ), + std::move( shaders ) ); } template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, Optional allocator, Dispatch const & d ) const { @@ -25454,15 +25462,16 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &shader ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXT" ); + resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); - return createResultValueType( result, std::move( shader ) ); + return ResultValue( static_cast( result ), std::move( shader ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename 
ResultValueType, ShaderEXTAllocator>>::type + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, ShaderEXTAllocator>> Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, Optional allocator, Dispatch const & d ) const @@ -25479,7 +25488,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( shaders.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique" ); + resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); std::vector, ShaderEXTAllocator> uniqueShaders; uniqueShaders.reserve( createInfos.size() ); ObjectDestroy deleter( *this, allocator, d ); @@ -25487,15 +25498,15 @@ namespace VULKAN_HPP_NAMESPACE { uniqueShaders.push_back( UniqueHandle( shader, deleter ) ); } - return createResultValueType( result, std::move( uniqueShaders ) ); + return ResultValue, ShaderEXTAllocator>>( + static_cast( result ), std::move( uniqueShaders ) ); } template < typename Dispatch, typename ShaderEXTAllocator, typename std::enable_if>::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType, ShaderEXTAllocator>>::type + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, ShaderEXTAllocator>> Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, Optional allocator, ShaderEXTAllocator & shaderEXTAllocator, @@ -25513,7 +25524,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( shaders.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique" ); + resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); std::vector, ShaderEXTAllocator> uniqueShaders( shaderEXTAllocator ); uniqueShaders.reserve( createInfos.size() ); ObjectDestroy deleter( *this, allocator, d ); @@ -25521,12 +25534,13 @@ namespace VULKAN_HPP_NAMESPACE { uniqueShaders.push_back( UniqueHandle( shader, deleter ) ); } - return createResultValueType( result, std::move( uniqueShaders ) ); + return ResultValue, ShaderEXTAllocator>>( + static_cast( result ), std::move( uniqueShaders ) ); } template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, Optional allocator, Dispatch const & d ) const { @@ -25542,10 +25556,13 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &shader ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXTUnique" ); + resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXTUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); - return createResultValueType( result, - UniqueHandle( shader, ObjectDestroy( *this, allocator, d ) ) ); + return ResultValue>( + static_cast( result ), + UniqueHandle( shader, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* 
VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ diff --git a/vulkan/vulkan_handles.hpp b/vulkan/vulkan_handles.hpp index 0c841e0..945bd34 100644 --- a/vulkan/vulkan_handles.hpp +++ b/vulkan/vulkan_handles.hpp @@ -1163,6 +1163,11 @@ namespace VULKAN_HPP_NAMESPACE struct MemoryMapInfoKHR; struct MemoryUnmapInfoKHR; + //=== VK_EXT_map_memory_placed === + struct PhysicalDeviceMapMemoryPlacedFeaturesEXT; + struct PhysicalDeviceMapMemoryPlacedPropertiesEXT; + struct MemoryMapPlacedInfoEXT; + //=== VK_EXT_shader_atomic_float2 === struct PhysicalDeviceShaderAtomicFloat2FeaturesEXT; @@ -1815,6 +1820,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_descriptor_pool_overallocation === struct PhysicalDeviceDescriptorPoolOverallocationFeaturesNV; + //=== VK_NV_shader_atomic_float16_vector === + struct PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV; + //=================================== //=== HANDLE forward declarations === //=================================== @@ -12754,12 +12762,12 @@ namespace VULKAN_HPP_NAMESPACE #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - Result unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR * pMemoryUnmapInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR * pMemoryUnmapInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + typename ResultValueType::type unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_EXT_swapchain_maintenance1 === @@ -13318,7 +13326,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD ResultValue + VULKAN_HPP_NODISCARD typename ResultValueType::type getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13736,44 +13744,44 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD ResultValue> + createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - ShaderEXTAllocator & 
shaderEXTAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD ResultValue> + createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + ShaderEXTAllocator & shaderEXTAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD ResultValue + createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE template >> - VULKAN_HPP_NODISCARD typename ResultValueType, ShaderEXTAllocator>>::type - createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD ResultValue, ShaderEXTAllocator>> + createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template >, typename std::enable_if>::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType, ShaderEXTAllocator>>::type - createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - ShaderEXTAllocator & shaderEXTAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD ResultValue, ShaderEXTAllocator>> + createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + ShaderEXTAllocator & shaderEXTAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD ResultValue> + createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ diff --git a/vulkan/vulkan_hash.hpp b/vulkan/vulkan_hash.hpp index bebd7cb..7bb3216 100644 --- a/vulkan/vulkan_hash.hpp +++ b/vulkan/vulkan_hash.hpp @@ -6815,6 +6815,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryMapPlacedInfoEXT const & memoryMapPlacedInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryMapPlacedInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMapPlacedInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMapPlacedInfoEXT.pPlacedAddress ); + return seed; + } + }; + template <> struct hash { @@ -9683,6 +9696,36 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( 
VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedFeaturesEXT const & physicalDeviceMapMemoryPlacedFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedFeaturesEXT.memoryMapPlaced ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedFeaturesEXT.memoryMapRangePlaced ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedFeaturesEXT.memoryUnmapReserve ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedPropertiesEXT const & physicalDeviceMapMemoryPlacedPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedPropertiesEXT.minPlacedMemoryMapAlignment ); + return seed; + } + }; + template <> struct hash { @@ -10949,6 +10992,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & physicalDeviceShaderAtomicFloat16VectorFeaturesNV ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat16VectorFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat16VectorFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat16VectorFeaturesNV.shaderFloat16VectorAtomics ); + return seed; + } + }; + template <> struct hash { diff --git a/vulkan/vulkan_raii.hpp b/vulkan/vulkan_raii.hpp index b55878b..22cb9c4 100644 --- a/vulkan/vulkan_raii.hpp +++ b/vulkan/vulkan_raii.hpp @@ -4311,7 +4311,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD void * mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo ) const; - void unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT; + void unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo ) const; //=== VK_EXT_swapchain_maintenance1 === @@ -10409,7 +10409,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_HUAWEI_subpass_shading === - VULKAN_HPP_NODISCARD std::pair getSubpassShadingMaxWorkgroupSizeHUAWEI() const; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D getSubpassShadingMaxWorkgroupSizeHUAWEI() const; private: VULKAN_HPP_NAMESPACE::Device m_device = {}; @@ -10806,10 +10806,12 @@ namespace VULKAN_HPP_NAMESPACE ShaderEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkShaderEXT shader, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr, + VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess ) : m_device( device ) , m_shader( shader ) , m_allocator( static_cast( allocator ) ) + , m_constructorSuccessCode( successCode ) , m_dispatcher( device.getDispatcher() ) { } @@ -10828,6 +10830,7 @@ namespace VULKAN_HPP_NAMESPACE : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) , m_shader( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shader, {} ) ) , m_allocator( 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_constructorSuccessCode( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ) ) , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -10841,6 +10844,7 @@ namespace VULKAN_HPP_NAMESPACE std::swap( m_device, rhs.m_device ); std::swap( m_shader, rhs.m_shader ); std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode ); std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; @@ -10863,20 +10867,27 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkDestroyShaderEXT( static_cast( m_device ), static_cast( m_shader ), reinterpret_cast( m_allocator ) ); } - m_device = nullptr; - m_shader = nullptr; - m_allocator = nullptr; - m_dispatcher = nullptr; + m_device = nullptr; + m_shader = nullptr; + m_allocator = nullptr; + m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + m_dispatcher = nullptr; } VULKAN_HPP_NAMESPACE::ShaderEXT release() { - m_device = nullptr; - m_allocator = nullptr; - m_dispatcher = nullptr; + m_device = nullptr; + m_allocator = nullptr; + m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + m_dispatcher = nullptr; return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_shader, nullptr ); } + VULKAN_HPP_NAMESPACE::Result getConstructorSuccessCode() const + { + return m_constructorSuccessCode; + } + VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; @@ -10893,6 +10904,7 @@ namespace VULKAN_HPP_NAMESPACE std::swap( m_device, rhs.m_device ); std::swap( m_shader, rhs.m_shader ); std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode ); std::swap( m_dispatcher, rhs.m_dispatcher ); } @@ -10901,10 +10913,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getBinaryData() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::ShaderEXT m_shader = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ShaderEXT m_shader = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; }; class ShaderEXTs : public std::vector @@ -20221,11 +20234,13 @@ namespace VULKAN_HPP_NAMESPACE return pData; } - VULKAN_HPP_INLINE void Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo ) const { VULKAN_HPP_ASSERT( getDispatcher()->vkUnmapMemory2KHR && "Function requires " ); - getDispatcher()->vkUnmapMemory2KHR( static_cast( m_device ), reinterpret_cast( &memoryUnmapInfo ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkUnmapMemory2KHR( static_cast( m_device ), reinterpret_cast( &memoryUnmapInfo ) ) ); + resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2KHR" ); } //=== VK_EXT_swapchain_maintenance1 === @@ -21311,8 +21326,7 @@ namespace VULKAN_HPP_NAMESPACE //=== 
VK_HUAWEI_subpass_shading === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair - RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI() const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI() const { VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI && "Function requires " ); @@ -21320,11 +21334,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( static_cast( m_device ), static_cast( m_renderPass ), reinterpret_cast( &maxWorkgroupSize ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } ); + resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI" ); - return std::make_pair( static_cast( result ), std::move( maxWorkgroupSize ) ); + return maxWorkgroupSize; } VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI() const VULKAN_HPP_NOEXCEPT @@ -22521,7 +22533,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( shaders.data() ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT ) ) { # if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) return VULKAN_HPP_UNEXPECTED( result ); @@ -22534,7 +22546,7 @@ namespace VULKAN_HPP_NAMESPACE shadersRAII.reserve( shaders.size() ); for ( auto & shader : shaders ) { - shadersRAII.emplace_back( *this, *reinterpret_cast( &shader ), allocator ); + shadersRAII.emplace_back( *this, *reinterpret_cast( &shader ), allocator, result ); } return shadersRAII; } @@ -22551,7 +22563,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &shader ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT ) ) { # if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) return VULKAN_HPP_UNEXPECTED( result ); diff --git a/vulkan/vulkan_static_assertions.hpp b/vulkan/vulkan_static_assertions.hpp index 30f287a..07c3078 100644 --- a/vulkan/vulkan_static_assertions.hpp +++ b/vulkan/vulkan_static_assertions.hpp @@ -4666,6 +4666,28 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "MemoryUnmapInfoKHR is not nothrow_move_constructible!" ); +//=== VK_EXT_map_memory_placed === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedFeaturesEXT ) == sizeof( VkPhysicalDeviceMapMemoryPlacedFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMapMemoryPlacedFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedPropertiesEXT ) == sizeof( VkPhysicalDeviceMapMemoryPlacedPropertiesEXT ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMapMemoryPlacedPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryMapPlacedInfoEXT ) == sizeof( VkMemoryMapPlacedInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryMapPlacedInfoEXT is not nothrow_move_constructible!" ); + //=== VK_EXT_shader_atomic_float2 === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT ) == @@ -7452,4 +7474,14 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceDescriptorPoolOverallocationFeaturesNV is not nothrow_move_constructible!" ); +//=== VK_NV_shader_atomic_float16_vector === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV ) == + sizeof( VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV is not nothrow_move_constructible!" ); + #endif diff --git a/vulkan/vulkan_structs.hpp b/vulkan/vulkan_structs.hpp index 826ebf1..e262669 100644 --- a/vulkan/vulkan_structs.hpp +++ b/vulkan/vulkan_structs.hpp @@ -27650,6 +27650,19 @@ namespace VULKAN_HPP_NAMESPACE { } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceFaultVendorInfoEXT( std::string const & description_, uint64_t vendorFaultCode_ = {}, uint64_t vendorFaultData_ = {} ) + : vendorFaultCode( vendorFaultCode_ ), vendorFaultData( vendorFaultData_ ) + { + VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( WIN32 ) + strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); +# else + strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); +# endif + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + DeviceFaultVendorInfoEXT & operator=( DeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -27666,6 +27679,19 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceFaultVendorInfoEXT & setDescription( std::string const & description_ ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( WIN32 ) + strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); +# else + strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); +# endif + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setVendorFaultCode( uint64_t vendorFaultCode_ ) VULKAN_HPP_NOEXCEPT { vendorFaultCode = vendorFaultCode_; @@ -27702,22 +27728,28 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceFaultVendorInfoEXT const & ) const = default; -#else + std::strong_ordering operator<=>( DeviceFaultVendorInfoEXT 
const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = vendorFaultCode <=> rhs.vendorFaultCode; cmp != 0 ) + return cmp; + if ( auto cmp = vendorFaultData <=> rhs.vendorFaultData; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + bool operator==( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( description == rhs.description ) && ( vendorFaultCode == rhs.vendorFaultCode ) && ( vendorFaultData == rhs.vendorFaultData ); -# endif + return ( strcmp( description, rhs.description ) == 0 ) && ( vendorFaultCode == rhs.vendorFaultCode ) && ( vendorFaultData == rhs.vendorFaultData ); } bool operator!=( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; @@ -27829,23 +27861,35 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceFaultInfoEXT const & ) const = default; -#else + std::strong_ordering operator<=>( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = pAddressInfos <=> rhs.pAddressInfos; cmp != 0 ) + return cmp; + if ( auto cmp = pVendorInfos <=> rhs.pVendorInfos; cmp != 0 ) + return cmp; + if ( auto cmp = pVendorBinaryData <=> rhs.pVendorBinaryData; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + bool operator==( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( description == rhs.description ) && ( pAddressInfos == rhs.pAddressInfos ) && + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( strcmp( description, rhs.description ) == 0 ) && ( pAddressInfos == rhs.pAddressInfos ) && ( pVendorInfos == rhs.pVendorInfos ) && ( pVendorBinaryData == rhs.pVendorBinaryData ); -# endif } bool operator!=( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceFaultInfoEXT; @@ -35620,6 +35664,18 @@ namespace VULKAN_HPP_NAMESPACE { } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ExtensionProperties( std::string const & extensionName_, uint32_t specVersion_ = {} ) : specVersion( specVersion_ ) + { + VULKAN_HPP_ASSERT( extensionName_.size() < VK_MAX_EXTENSION_NAME_SIZE ); +# if defined( WIN32 ) + strncpy_s( extensionName, VK_MAX_EXTENSION_NAME_SIZE, extensionName_.data(), extensionName_.size() ); +# else + strncpy( extensionName, extensionName_.data(), std::min( VK_MAX_EXTENSION_NAME_SIZE, extensionName_.size() ) ); +# endif + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + ExtensionProperties & operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -35652,22 +35708,26 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExtensionProperties const & ) const = default; -#else + std::strong_ordering operator<=>( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = strcmp( extensionName, rhs.extensionName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = specVersion <=> rhs.specVersion; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + bool operator==( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( extensionName == rhs.extensionName ) && ( specVersion == rhs.specVersion ); -# endif + return ( strcmp( extensionName, rhs.extensionName ) == 0 ) && ( specVersion == rhs.specVersion ); } bool operator!=( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::ArrayWrapper1D extensionName = {}; @@ -39042,98 +39102,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setPresentID( uint64_t presentID_ ) VULKAN_HPP_NOEXCEPT - { - presentID = presentID_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setInputSampleTimeUs( uint64_t inputSampleTimeUs_ ) VULKAN_HPP_NOEXCEPT - { - inputSampleTimeUs = inputSampleTimeUs_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setSimStartTimeUs( uint64_t simStartTimeUs_ ) VULKAN_HPP_NOEXCEPT - { - simStartTimeUs = simStartTimeUs_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setSimEndTimeUs( uint64_t simEndTimeUs_ ) VULKAN_HPP_NOEXCEPT - { - simEndTimeUs = simEndTimeUs_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setRenderSubmitStartTimeUs( uint64_t renderSubmitStartTimeUs_ ) VULKAN_HPP_NOEXCEPT - { - renderSubmitStartTimeUs = renderSubmitStartTimeUs_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setRenderSubmitEndTimeUs( uint64_t renderSubmitEndTimeUs_ ) VULKAN_HPP_NOEXCEPT - { - renderSubmitEndTimeUs = renderSubmitEndTimeUs_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setPresentStartTimeUs( uint64_t presentStartTimeUs_ ) VULKAN_HPP_NOEXCEPT - { - presentStartTimeUs = presentStartTimeUs_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setPresentEndTimeUs( uint64_t presentEndTimeUs_ ) VULKAN_HPP_NOEXCEPT - { - presentEndTimeUs = presentEndTimeUs_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setDriverStartTimeUs( uint64_t driverStartTimeUs_ ) VULKAN_HPP_NOEXCEPT - { - driverStartTimeUs = driverStartTimeUs_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setDriverEndTimeUs( uint64_t driverEndTimeUs_ ) VULKAN_HPP_NOEXCEPT - { - driverEndTimeUs = driverEndTimeUs_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setOsRenderQueueStartTimeUs( uint64_t osRenderQueueStartTimeUs_ ) VULKAN_HPP_NOEXCEPT - { - osRenderQueueStartTimeUs = osRenderQueueStartTimeUs_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setOsRenderQueueEndTimeUs( 
uint64_t osRenderQueueEndTimeUs_ ) VULKAN_HPP_NOEXCEPT - { - osRenderQueueEndTimeUs = osRenderQueueEndTimeUs_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setGpuRenderStartTimeUs( uint64_t gpuRenderStartTimeUs_ ) VULKAN_HPP_NOEXCEPT - { - gpuRenderStartTimeUs = gpuRenderStartTimeUs_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setGpuRenderEndTimeUs( uint64_t gpuRenderEndTimeUs_ ) VULKAN_HPP_NOEXCEPT - { - gpuRenderEndTimeUs = gpuRenderEndTimeUs_; - return *this; - } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkLatencyTimingsFrameReportNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -49714,6 +49682,26 @@ namespace VULKAN_HPP_NAMESPACE LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT : LayerProperties( *reinterpret_cast( &rhs ) ) {} +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + LayerProperties( std::string const & layerName_, uint32_t specVersion_ = {}, uint32_t implementationVersion_ = {}, std::string const & description_ = {} ) + : specVersion( specVersion_ ), implementationVersion( implementationVersion_ ) + { + VULKAN_HPP_ASSERT( layerName_.size() < VK_MAX_EXTENSION_NAME_SIZE ); +# if defined( WIN32 ) + strncpy_s( layerName, VK_MAX_EXTENSION_NAME_SIZE, layerName_.data(), layerName_.size() ); +# else + strncpy( layerName, layerName_.data(), std::min( VK_MAX_EXTENSION_NAME_SIZE, layerName_.size() ) ); +# endif + + VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( WIN32 ) + strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); +# else + strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); +# endif + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -49749,23 +49737,31 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( LayerProperties const & ) const = default; -#else + std::strong_ordering operator<=>( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = strcmp( layerName, rhs.layerName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = specVersion <=> rhs.specVersion; cmp != 0 ) + return cmp; + if ( auto cmp = implementationVersion <=> rhs.implementationVersion; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
     bool operator==( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
-# if defined( VULKAN_HPP_USE_REFLECT )
-      return this->reflect() == rhs.reflect();
-# else
-      return ( layerName == rhs.layerName ) && ( specVersion == rhs.specVersion ) && ( implementationVersion == rhs.implementationVersion ) &&
-             ( description == rhs.description );
-# endif
+      return ( strcmp( layerName, rhs.layerName ) == 0 ) && ( specVersion == rhs.specVersion ) && ( implementationVersion == rhs.implementationVersion ) &&
+             ( strcmp( description, rhs.description ) == 0 );
     }
 
     bool operator!=( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
-#endif
 
   public:
     VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layerName = {};
@@ -51690,6 +51686,102 @@ namespace VULKAN_HPP_NAMESPACE
     using Type = MemoryMapInfoKHR;
   };
 
+  struct MemoryMapPlacedInfoEXT
+  {
+    using NativeType = VkMemoryMapPlacedInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryMapPlacedInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryMapPlacedInfoEXT( void * pPlacedAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , pPlacedAddress( pPlacedAddress_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR MemoryMapPlacedInfoEXT( MemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryMapPlacedInfoEXT( VkMemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryMapPlacedInfoEXT( *reinterpret_cast<MemoryMapPlacedInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    MemoryMapPlacedInfoEXT & operator=( MemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryMapPlacedInfoEXT & operator=( VkMemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryMapPlacedInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MemoryMapPlacedInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryMapPlacedInfoEXT & setPPlacedAddress( void * pPlacedAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPlacedAddress = pPlacedAddress_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkMemoryMapPlacedInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryMapPlacedInfoEXT *>( this );
+    }
+
+    operator VkMemoryMapPlacedInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryMapPlacedInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, void * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pPlacedAddress );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryMapPlacedInfoEXT const & ) const = default;
+#else
+    bool operator==( MemoryMapPlacedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pPlacedAddress == rhs.pPlacedAddress );
+# endif
+    }
+
+    bool operator!=( MemoryMapPlacedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryMapPlacedInfoEXT;
+    const void * pNext = {};
+    void * 
pPlacedAddress = {}; + }; + + template <> + struct CppType + { + using Type = MemoryMapPlacedInfoEXT; + }; + struct MemoryOpaqueCaptureAddressAllocateInfo { using NativeType = VkMemoryOpaqueCaptureAddressAllocateInfo; @@ -55069,6 +55161,37 @@ namespace VULKAN_HPP_NAMESPACE { } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PerformanceCounterDescriptionKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_, + std::string const & name_, + std::string const & category_ = {}, + std::string const & description_ = {}, + void * pNext_ = nullptr ) + : pNext( pNext_ ), flags( flags_ ) + { + VULKAN_HPP_ASSERT( name_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( WIN32 ) + strncpy_s( name, VK_MAX_DESCRIPTION_SIZE, name_.data(), name_.size() ); +# else + strncpy( name, name_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, name_.size() ) ); +# endif + + VULKAN_HPP_ASSERT( category_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( WIN32 ) + strncpy_s( category, VK_MAX_DESCRIPTION_SIZE, category_.data(), category_.size() ); +# else + strncpy( category, category_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, category_.size() ) ); +# endif + + VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( WIN32 ) + strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); +# else + strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); +# endif + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -55106,23 +55229,35 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PerformanceCounterDescriptionKHR const & ) const = default; -#else + std::strong_ordering operator<=>( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( category, rhs.category ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + bool operator==( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( name == rhs.name ) && ( category == rhs.category ) && - ( description == rhs.description ); -# endif + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( strcmp( name, rhs.name ) == 0 ) && + ( strcmp( category, rhs.category ) == 0 ) && ( strcmp( description, rhs.description ) == 0 ); } bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR; @@ -55820,20 +55955,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL & setData( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & data_ ) VULKAN_HPP_NOEXCEPT - { - data = data_; - return *this; - } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPerformanceValueINTEL const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -62541,6 +62662,30 @@ namespace VULKAN_HPP_NAMESPACE { } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceDriverProperties( VULKAN_HPP_NAMESPACE::DriverId driverID_, + std::string const & driverName_, + std::string const & driverInfo_ = {}, + VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, + void * pNext_ = nullptr ) + : pNext( pNext_ ), driverID( driverID_ ), conformanceVersion( conformanceVersion_ ) + { + VULKAN_HPP_ASSERT( driverName_.size() < VK_MAX_DRIVER_NAME_SIZE ); +# if defined( WIN32 ) + strncpy_s( driverName, VK_MAX_DRIVER_NAME_SIZE, driverName_.data(), driverName_.size() ); +# else + strncpy( driverName, driverName_.data(), std::min( VK_MAX_DRIVER_NAME_SIZE, driverName_.size() ) ); +# endif + + VULKAN_HPP_ASSERT( driverInfo_.size() < VK_MAX_DRIVER_INFO_SIZE ); +# if defined( WIN32 ) + strncpy_s( driverInfo, VK_MAX_DRIVER_INFO_SIZE, driverInfo_.data(), driverInfo_.size() ); +# else + strncpy( driverInfo, driverInfo_.data(), std::min( VK_MAX_DRIVER_INFO_SIZE, driverInfo_.size() ) ); +# endif + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + PhysicalDeviceDriverProperties & operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -62578,23 +62723,35 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceDriverProperties const & ) const = default; -#else + std::strong_ordering operator<=>( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = driverID <=> rhs.driverID; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( driverName, rhs.driverName ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( driverInfo, rhs.driverInfo ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = conformanceVersion <=> rhs.conformanceVersion; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + bool operator==( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( driverName == rhs.driverName ) && - ( driverInfo == rhs.driverInfo ) && ( conformanceVersion == rhs.conformanceVersion ); -# endif + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( strcmp( driverName, rhs.driverName ) == 0 ) && + ( strcmp( driverInfo, rhs.driverInfo ) == 0 ) && ( conformanceVersion == rhs.conformanceVersion ); } bool operator!=( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties; @@ -67403,6 +67560,19 @@ namespace VULKAN_HPP_NAMESPACE { } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceGroupProperties( VULKAN_HPP_NAMESPACE::ArrayProxy const & physicalDevices_, + VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {}, + void * pNext_ = nullptr ) + : pNext( pNext_ ) + , physicalDeviceCount( std::min( static_cast( physicalDevices_.size() ), VK_MAX_DEVICE_GROUP_SIZE ) ) + , subsetAllocation( subsetAllocation_ ) + { + VULKAN_HPP_ASSERT( physicalDevices_.size() < VK_MAX_DEVICE_GROUP_SIZE ); + memcpy( physicalDevices, physicalDevices_.data(), physicalDeviceCount * sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -67439,23 +67609,37 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceGroupProperties const & ) const = default; -#else + std::strong_ordering operator<=>( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = physicalDeviceCount <=> rhs.physicalDeviceCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < physicalDeviceCount; ++i ) + { + if ( auto cmp = physicalDevices[i] <=> rhs.physicalDevices[i]; cmp != 0 ) + return cmp; + } + if ( auto cmp = subsetAllocation <=> rhs.subsetAllocation; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + bool operator==( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) && - ( physicalDevices == rhs.physicalDevices ) && ( subsetAllocation == rhs.subsetAllocation ); -# endif + ( memcmp( physicalDevices, rhs.physicalDevices, physicalDeviceCount * sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) ) == 0 ) && + ( subsetAllocation == rhs.subsetAllocation ); } bool operator!=( PhysicalDeviceGroupProperties 
const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties; @@ -71836,6 +72020,211 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceMaintenance6PropertiesKHR; }; + struct PhysicalDeviceMapMemoryPlacedFeaturesEXT + { + using NativeType = VkPhysicalDeviceMapMemoryPlacedFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMapMemoryPlacedFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryMapPlaced_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 memoryMapRangePlaced_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 memoryUnmapReserve_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , memoryMapPlaced( memoryMapPlaced_ ) + , memoryMapRangePlaced( memoryMapRangePlaced_ ) + , memoryUnmapReserve( memoryUnmapReserve_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedFeaturesEXT( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMapMemoryPlacedFeaturesEXT( VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMapMemoryPlacedFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMapMemoryPlacedFeaturesEXT & operator=( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMapMemoryPlacedFeaturesEXT & operator=( VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & setMemoryMapPlaced( VULKAN_HPP_NAMESPACE::Bool32 memoryMapPlaced_ ) VULKAN_HPP_NOEXCEPT + { + memoryMapPlaced = memoryMapPlaced_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & + setMemoryMapRangePlaced( VULKAN_HPP_NAMESPACE::Bool32 memoryMapRangePlaced_ ) VULKAN_HPP_NOEXCEPT + { + memoryMapRangePlaced = memoryMapRangePlaced_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & + setMemoryUnmapReserve( VULKAN_HPP_NAMESPACE::Bool32 memoryUnmapReserve_ ) VULKAN_HPP_NOEXCEPT + { + memoryUnmapReserve = memoryUnmapReserve_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMapMemoryPlacedFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryMapPlaced, memoryMapRangePlaced, memoryUnmapReserve ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if 
defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryMapPlaced == rhs.memoryMapPlaced ) &&
+             ( memoryMapRangePlaced == rhs.memoryMapRangePlaced ) && ( memoryUnmapReserve == rhs.memoryUnmapReserve );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMapMemoryPlacedFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 memoryMapPlaced = {};
+    VULKAN_HPP_NAMESPACE::Bool32 memoryMapRangePlaced = {};
+    VULKAN_HPP_NAMESPACE::Bool32 memoryUnmapReserve = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMapMemoryPlacedFeaturesEXT>
+  {
+    using Type = PhysicalDeviceMapMemoryPlacedFeaturesEXT;
+  };
+
+  struct PhysicalDeviceMapMemoryPlacedPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceMapMemoryPlacedPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMapMemoryPlacedPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize minPlacedMemoryMapAlignment_ = {},
+                                                                     void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , minPlacedMemoryMapAlignment( minPlacedMemoryMapAlignment_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedPropertiesEXT( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMapMemoryPlacedPropertiesEXT( VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMapMemoryPlacedPropertiesEXT( *reinterpret_cast<PhysicalDeviceMapMemoryPlacedPropertiesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceMapMemoryPlacedPropertiesEXT & operator=( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMapMemoryPlacedPropertiesEXT & operator=( VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMapMemoryPlacedPropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceMapMemoryPlacedPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMapMemoryPlacedPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, minPlacedMemoryMapAlignment );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minPlacedMemoryMapAlignment == rhs.minPlacedMemoryMapAlignment );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMapMemoryPlacedPropertiesEXT;
+    void * pNext = {};
+    
VULKAN_HPP_NAMESPACE::DeviceSize minPlacedMemoryMapAlignment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMapMemoryPlacedPropertiesEXT; + }; + struct PhysicalDeviceMemoryBudgetPropertiesEXT { using NativeType = VkPhysicalDeviceMemoryBudgetPropertiesEXT; @@ -72232,6 +72621,19 @@ namespace VULKAN_HPP_NAMESPACE { } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceMemoryProperties( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryTypes_, + VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryHeaps_ = {} ) + : memoryTypeCount( std::min( static_cast( memoryTypes_.size() ), VK_MAX_MEMORY_TYPES ) ) + , memoryHeapCount( std::min( static_cast( memoryHeaps_.size() ), VK_MAX_MEMORY_HEAPS ) ) + { + VULKAN_HPP_ASSERT( memoryTypes_.size() < VK_MAX_MEMORY_TYPES ); + memcpy( memoryTypes, memoryTypes_.data(), memoryTypeCount * sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) ); + VULKAN_HPP_ASSERT( memoryHeaps_.size() < VK_MAX_MEMORY_HEAPS ); + memcpy( memoryHeaps, memoryHeaps_.data(), memoryHeapCount * sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + PhysicalDeviceMemoryProperties & operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -72267,23 +72669,39 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMemoryProperties const & ) const = default; -#else + std::strong_ordering operator<=>( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = memoryTypeCount <=> rhs.memoryTypeCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < memoryTypeCount; ++i ) + { + if ( auto cmp = memoryTypes[i] <=> rhs.memoryTypes[i]; cmp != 0 ) + return cmp; + } + if ( auto cmp = memoryHeapCount <=> rhs.memoryHeapCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < memoryHeapCount; ++i ) + { + if ( auto cmp = memoryHeaps[i] <=> rhs.memoryHeaps[i]; cmp != 0 ) + return cmp; + } + + return std::strong_ordering::equivalent; + } +#endif + bool operator==( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( memoryTypeCount == rhs.memoryTypeCount ) && ( memoryTypes == rhs.memoryTypes ) && ( memoryHeapCount == rhs.memoryHeapCount ) && - ( memoryHeaps == rhs.memoryHeaps ); -# endif + return ( memoryTypeCount == rhs.memoryTypeCount ) && + ( memcmp( memoryTypes, rhs.memoryTypes, memoryTypeCount * sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) ) == 0 ) && + ( memoryHeapCount == rhs.memoryHeapCount ) && + ( memcmp( memoryHeaps, rhs.memoryHeaps, memoryHeapCount * sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) ) == 0 ); } bool operator!=( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: uint32_t memoryTypeCount = {}; @@ -77074,6 +77492,34 @@ namespace VULKAN_HPP_NAMESPACE { } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceProperties( uint32_t apiVersion_, + uint32_t driverVersion_, + uint32_t vendorID_, + uint32_t deviceID_, + VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_, + std::string const & deviceName_, + std::array const & pipelineCacheUUID_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) + : apiVersion( apiVersion_ ) + , driverVersion( driverVersion_ ) 
+ , vendorID( vendorID_ ) + , deviceID( deviceID_ ) + , deviceType( deviceType_ ) + , pipelineCacheUUID( pipelineCacheUUID_ ) + , limits( limits_ ) + , sparseProperties( sparseProperties_ ) + { + VULKAN_HPP_ASSERT( deviceName_.size() < VK_MAX_PHYSICAL_DEVICE_NAME_SIZE ); +# if defined( WIN32 ) + strncpy_s( deviceName, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE, deviceName_.data(), deviceName_.size() ); +# else + strncpy( deviceName, deviceName_.data(), std::min( VK_MAX_PHYSICAL_DEVICE_NAME_SIZE, deviceName_.size() ) ); +# endif + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -77114,24 +77560,42 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceProperties const & ) const = default; -#else + std::partial_ordering operator<=>( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = apiVersion <=> rhs.apiVersion; cmp != 0 ) + return cmp; + if ( auto cmp = driverVersion <=> rhs.driverVersion; cmp != 0 ) + return cmp; + if ( auto cmp = vendorID <=> rhs.vendorID; cmp != 0 ) + return cmp; + if ( auto cmp = deviceID <=> rhs.deviceID; cmp != 0 ) + return cmp; + if ( auto cmp = deviceType <=> rhs.deviceType; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( deviceName, rhs.deviceName ); cmp != 0 ) + return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater; + if ( auto cmp = pipelineCacheUUID <=> rhs.pipelineCacheUUID; cmp != 0 ) + return cmp; + if ( auto cmp = limits <=> rhs.limits; cmp != 0 ) + return cmp; + if ( auto cmp = sparseProperties <=> rhs.sparseProperties; cmp != 0 ) + return cmp; + + return std::partial_ordering::equivalent; + } +#endif + bool operator==( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else return ( apiVersion == rhs.apiVersion ) && ( driverVersion == rhs.driverVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && - ( deviceType == rhs.deviceType ) && ( deviceName == rhs.deviceName ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) && + ( deviceType == rhs.deviceType ) && ( strcmp( deviceName, rhs.deviceName ) == 0 ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) && ( limits == rhs.limits ) && ( sparseProperties == rhs.sparseProperties ); -# endif } bool operator!=( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: uint32_t apiVersion = {}; @@ -80218,6 +80682,106 @@ namespace VULKAN_HPP_NAMESPACE using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures; + struct PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV + { + using NativeType = VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16VectorAtomics_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , shaderFloat16VectorAtomics( shaderFloat16VectorAtomics_ ) + { + } + + VULKAN_HPP_CONSTEXPR + 
PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV( VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV( *reinterpret_cast<PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV &
+      operator=( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV & operator=( VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV &
+      setShaderFloat16VectorAtomics( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16VectorAtomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderFloat16VectorAtomics = shaderFloat16VectorAtomics_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderFloat16VectorAtomics );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFloat16VectorAtomics == rhs.shaderFloat16VectorAtomics );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16VectorAtomics = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV>
+  {
+    using Type = PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV;
+  };
+
   struct PhysicalDeviceShaderAtomicFloat2FeaturesEXT
   {
     using NativeType = VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT;
@@ -86214,6 +86778,45 @@ namespace VULKAN_HPP_NAMESPACE
     {
     }
 
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PhysicalDeviceToolProperties( std::string const & name_,
+                                  std::string const & version_ = {},
+                                  VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes_ = {},
+                                  std::string const & description_ = {},
+                                  std::string const & layer_ = {},
+                                  void * pNext_ = nullptr )
+      : pNext( pNext_ ), purposes( purposes_ )
+    {
+      VULKAN_HPP_ASSERT( name_.size() < VK_MAX_EXTENSION_NAME_SIZE );
+# if defined( WIN32 )
+      strncpy_s( name, VK_MAX_EXTENSION_NAME_SIZE, name_.data(), name_.size() );
+# else
+      strncpy( name, 
name_.data(), std::min( VK_MAX_EXTENSION_NAME_SIZE, name_.size() ) ); +# endif + + VULKAN_HPP_ASSERT( version_.size() < VK_MAX_EXTENSION_NAME_SIZE ); +# if defined( WIN32 ) + strncpy_s( version, VK_MAX_EXTENSION_NAME_SIZE, version_.data(), version_.size() ); +# else + strncpy( version, version_.data(), std::min( VK_MAX_EXTENSION_NAME_SIZE, version_.size() ) ); +# endif + + VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( WIN32 ) + strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); +# else + strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); +# endif + + VULKAN_HPP_ASSERT( layer_.size() < VK_MAX_EXTENSION_NAME_SIZE ); +# if defined( WIN32 ) + strncpy_s( layer, VK_MAX_EXTENSION_NAME_SIZE, layer_.data(), layer_.size() ); +# else + strncpy( layer, layer_.data(), std::min( VK_MAX_EXTENSION_NAME_SIZE, layer_.size() ) ); +# endif + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + PhysicalDeviceToolProperties & operator=( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -86252,23 +86855,37 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceToolProperties const & ) const = default; -#else + std::strong_ordering operator<=>( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( version, rhs.version ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = purposes <=> rhs.purposes; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( layer, rhs.layer ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + bool operator==( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( name == rhs.name ) && ( version == rhs.version ) && ( purposes == rhs.purposes ) && - ( description == rhs.description ) && ( layer == rhs.layer ); -# endif + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( strcmp( name, rhs.name ) == 0 ) && ( strcmp( version, rhs.version ) == 0 ) && + ( purposes == rhs.purposes ) && ( strcmp( description, rhs.description ) == 0 ) && ( strcmp( layer, rhs.layer ) == 0 ); } bool operator!=( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolProperties; @@ -88766,6 +89383,129 @@ namespace VULKAN_HPP_NAMESPACE { } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceVulkan12Properties( + VULKAN_HPP_NAMESPACE::DriverId driverID_, + std::string const & driverName_, + std::string const & driverInfo_ = {}, + VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, + uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, + uint32_t 
maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, + uint32_t maxPerStageUpdateAfterBindResources_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, + uint64_t maxTimelineSemaphoreValueDifference_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {}, + void * pNext_ = nullptr ) + : pNext( pNext_ ) + , driverID( driverID_ ) + , conformanceVersion( conformanceVersion_ ) + , denormBehaviorIndependence( denormBehaviorIndependence_ ) + , roundingModeIndependence( roundingModeIndependence_ ) + , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ) + , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ) + , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ) + , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ) + , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ) + , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ) + , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ) + , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ) + , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ) + , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ) + , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ) + , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ) + , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ) + , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ) + , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ) + , maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ) + , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ) + , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ) + , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ) + , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ) + , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ) + , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ) + , quadDivergentImplicitLod( quadDivergentImplicitLod_ ) + , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ) + , maxPerStageDescriptorUpdateAfterBindUniformBuffers( 
maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ) + , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ) + , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ) + , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ) + , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ) + , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ) + , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ) + , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ) + , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ) + , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ) + , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ) + , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ) + , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ) + , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ) + , supportedDepthResolveModes( supportedDepthResolveModes_ ) + , supportedStencilResolveModes( supportedStencilResolveModes_ ) + , independentResolveNone( independentResolveNone_ ) + , independentResolve( independentResolve_ ) + , filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ) + , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ ) + , maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ) + , framebufferIntegerColorSampleCounts( framebufferIntegerColorSampleCounts_ ) + { + VULKAN_HPP_ASSERT( driverName_.size() < VK_MAX_DRIVER_NAME_SIZE ); +# if defined( WIN32 ) + strncpy_s( driverName, VK_MAX_DRIVER_NAME_SIZE, driverName_.data(), driverName_.size() ); +# else + strncpy( driverName, driverName_.data(), std::min( VK_MAX_DRIVER_NAME_SIZE, driverName_.size() ) ); +# endif + + VULKAN_HPP_ASSERT( driverInfo_.size() < VK_MAX_DRIVER_INFO_SIZE ); +# if defined( WIN32 ) + strncpy_s( driverInfo, VK_MAX_DRIVER_INFO_SIZE, driverInfo_.data(), driverInfo_.size() ); +# else + strncpy( driverInfo, driverInfo_.data(), std::min( VK_MAX_DRIVER_INFO_SIZE, driverInfo_.size() ) ); +# endif + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + PhysicalDeviceVulkan12Properties & operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -88904,15 +89644,125 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceVulkan12Properties const & ) const = default; -#else + std::strong_ordering operator<=>( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = driverID <=> rhs.driverID; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( driverName, rhs.driverName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( driverInfo, rhs.driverInfo ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = conformanceVersion <=> rhs.conformanceVersion; cmp != 0 ) + return cmp; + if ( auto cmp = denormBehaviorIndependence <=> rhs.denormBehaviorIndependence; cmp != 0 ) + return cmp; + if ( auto cmp = roundingModeIndependence <=> rhs.roundingModeIndependence; cmp != 0 ) + return cmp; + if ( auto cmp = shaderSignedZeroInfNanPreserveFloat16 <=> rhs.shaderSignedZeroInfNanPreserveFloat16; cmp != 0 ) + return cmp; + if ( auto cmp = shaderSignedZeroInfNanPreserveFloat32 <=> rhs.shaderSignedZeroInfNanPreserveFloat32; cmp != 0 ) + return cmp; + if ( auto cmp = shaderSignedZeroInfNanPreserveFloat64 <=> rhs.shaderSignedZeroInfNanPreserveFloat64; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormPreserveFloat16 <=> rhs.shaderDenormPreserveFloat16; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormPreserveFloat32 <=> rhs.shaderDenormPreserveFloat32; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormPreserveFloat64 <=> rhs.shaderDenormPreserveFloat64; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormFlushToZeroFloat16 <=> rhs.shaderDenormFlushToZeroFloat16; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormFlushToZeroFloat32 <=> rhs.shaderDenormFlushToZeroFloat32; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormFlushToZeroFloat64 <=> rhs.shaderDenormFlushToZeroFloat64; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTEFloat16 <=> rhs.shaderRoundingModeRTEFloat16; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTEFloat32 <=> rhs.shaderRoundingModeRTEFloat32; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTEFloat64 <=> rhs.shaderRoundingModeRTEFloat64; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTZFloat16 <=> rhs.shaderRoundingModeRTZFloat16; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTZFloat32 <=> rhs.shaderRoundingModeRTZFloat32; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTZFloat64 <=> rhs.shaderRoundingModeRTZFloat64; cmp != 0 ) + return cmp; + if ( auto cmp = maxUpdateAfterBindDescriptorsInAllPools <=> rhs.maxUpdateAfterBindDescriptorsInAllPools; cmp != 0 ) + return cmp; + if ( auto cmp = shaderUniformBufferArrayNonUniformIndexingNative <=> rhs.shaderUniformBufferArrayNonUniformIndexingNative; cmp != 0 ) + return cmp; + if ( auto cmp = shaderSampledImageArrayNonUniformIndexingNative <=> rhs.shaderSampledImageArrayNonUniformIndexingNative; cmp != 0 ) + return cmp; + if ( auto cmp = shaderStorageBufferArrayNonUniformIndexingNative <=> rhs.shaderStorageBufferArrayNonUniformIndexingNative; cmp != 0 ) + return cmp; + if ( auto cmp = shaderStorageImageArrayNonUniformIndexingNative <=> rhs.shaderStorageImageArrayNonUniformIndexingNative; cmp != 0 ) + return cmp; + if ( auto cmp = shaderInputAttachmentArrayNonUniformIndexingNative <=> rhs.shaderInputAttachmentArrayNonUniformIndexingNative; cmp != 0 ) + return cmp; + if ( auto cmp = robustBufferAccessUpdateAfterBind <=> rhs.robustBufferAccessUpdateAfterBind; cmp != 0 ) + return cmp; + if ( auto cmp = quadDivergentImplicitLod <=> rhs.quadDivergentImplicitLod; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindSamplers <=> rhs.maxPerStageDescriptorUpdateAfterBindSamplers; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindUniformBuffers <=> rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindStorageBuffers <=> 
rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindSampledImages <=> rhs.maxPerStageDescriptorUpdateAfterBindSampledImages; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindStorageImages <=> rhs.maxPerStageDescriptorUpdateAfterBindStorageImages; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindInputAttachments <=> rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageUpdateAfterBindResources <=> rhs.maxPerStageUpdateAfterBindResources; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindSamplers <=> rhs.maxDescriptorSetUpdateAfterBindSamplers; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindUniformBuffers <=> rhs.maxDescriptorSetUpdateAfterBindUniformBuffers; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindUniformBuffersDynamic <=> rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageBuffers <=> rhs.maxDescriptorSetUpdateAfterBindStorageBuffers; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageBuffersDynamic <=> rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindSampledImages <=> rhs.maxDescriptorSetUpdateAfterBindSampledImages; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageImages <=> rhs.maxDescriptorSetUpdateAfterBindStorageImages; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindInputAttachments <=> rhs.maxDescriptorSetUpdateAfterBindInputAttachments; cmp != 0 ) + return cmp; + if ( auto cmp = supportedDepthResolveModes <=> rhs.supportedDepthResolveModes; cmp != 0 ) + return cmp; + if ( auto cmp = supportedStencilResolveModes <=> rhs.supportedStencilResolveModes; cmp != 0 ) + return cmp; + if ( auto cmp = independentResolveNone <=> rhs.independentResolveNone; cmp != 0 ) + return cmp; + if ( auto cmp = independentResolve <=> rhs.independentResolve; cmp != 0 ) + return cmp; + if ( auto cmp = filterMinmaxSingleComponentFormats <=> rhs.filterMinmaxSingleComponentFormats; cmp != 0 ) + return cmp; + if ( auto cmp = filterMinmaxImageComponentMapping <=> rhs.filterMinmaxImageComponentMapping; cmp != 0 ) + return cmp; + if ( auto cmp = maxTimelineSemaphoreValueDifference <=> rhs.maxTimelineSemaphoreValueDifference; cmp != 0 ) + return cmp; + if ( auto cmp = framebufferIntegerColorSampleCounts <=> rhs.framebufferIntegerColorSampleCounts; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + bool operator==( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( driverName == rhs.driverName ) && - ( driverInfo == rhs.driverInfo ) && ( conformanceVersion == rhs.conformanceVersion ) && + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( strcmp( driverName, rhs.driverName ) == 0 ) && + ( strcmp( driverInfo, rhs.driverInfo ) == 0 ) && ( conformanceVersion == rhs.conformanceVersion ) && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && ( roundingModeIndependence == 
rhs.roundingModeIndependence ) && ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) && ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) && @@ -88952,14 +89802,12 @@ namespace VULKAN_HPP_NAMESPACE ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ) && ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ) && ( framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts ); -# endif } bool operator!=( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties; @@ -91951,13 +92799,26 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template - PipelineExecutableInternalRepresentationKHR( std::array const & name_, - std::array const & description_, - VULKAN_HPP_NAMESPACE::Bool32 isText_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_, - void * pNext_ = nullptr ) - : pNext( pNext_ ), name( name_ ), description( description_ ), isText( isText_ ), dataSize( data_.size() * sizeof( T ) ), pData( data_.data() ) + PipelineExecutableInternalRepresentationKHR( std::string const & name_, + std::string const & description_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 isText_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ = {}, + void * pNext_ = nullptr ) + : pNext( pNext_ ), isText( isText_ ), dataSize( data_.size() * sizeof( T ) ), pData( data_.data() ) { + VULKAN_HPP_ASSERT( name_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( WIN32 ) + strncpy_s( name, VK_MAX_DESCRIPTION_SIZE, name_.data(), name_.size() ); +# else + strncpy( name, name_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, name_.size() ) ); +# endif + + VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( WIN32 ) + strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); +# else + strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); +# endif } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -91999,23 +92860,37 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineExecutableInternalRepresentationKHR const & ) const = default; -#else + std::strong_ordering operator<=>( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = isText <=> rhs.isText; cmp != 0 ) + return cmp; + if ( auto cmp = dataSize <=> rhs.dataSize; cmp != 0 ) + return cmp; + if ( auto cmp = pData <=> rhs.pData; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + bool operator==( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( name == rhs.name ) && ( description == rhs.description ) && ( isText == rhs.isText ) && - ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); -# endif + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( strcmp( name, rhs.name ) == 0 ) && ( strcmp( description, rhs.description ) == 0 ) && + ( isText == rhs.isText ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); } bool operator!=( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR; @@ -92061,6 +92936,30 @@ namespace VULKAN_HPP_NAMESPACE { } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineExecutablePropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_, + std::string const & name_, + std::string const & description_ = {}, + uint32_t subgroupSize_ = {}, + void * pNext_ = nullptr ) + : pNext( pNext_ ), stages( stages_ ), subgroupSize( subgroupSize_ ) + { + VULKAN_HPP_ASSERT( name_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( WIN32 ) + strncpy_s( name, VK_MAX_DESCRIPTION_SIZE, name_.data(), name_.size() ); +# else + strncpy( name, name_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, name_.size() ) ); +# endif + + VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( WIN32 ) + strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); +# else + strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); +# endif + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + PipelineExecutablePropertiesKHR & operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -92098,23 +92997,35 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineExecutablePropertiesKHR const & ) const = default; -#else + std::strong_ordering operator<=>( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = stages <=> rhs.stages; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = subgroupSize <=> rhs.subgroupSize; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + bool operator==( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stages == rhs.stages ) && ( name == rhs.name ) && ( description == rhs.description ) && - ( subgroupSize == rhs.subgroupSize ); -# endif + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stages == rhs.stages ) && ( strcmp( name, rhs.name ) == 0 ) && + ( strcmp( description, rhs.description ) == 0 ) && ( subgroupSize == rhs.subgroupSize ); } bool operator!=( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR; @@ -92223,6 +93134,31 @@ namespace VULKAN_HPP_NAMESPACE { } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineExecutableStatisticKHR( + std::string const & name_, + std::string const & description_ = {}, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {}, + void * pNext_ = nullptr ) + : pNext( pNext_ ), format( format_ ), value( value_ ) + { + VULKAN_HPP_ASSERT( name_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( WIN32 ) + strncpy_s( name, VK_MAX_DESCRIPTION_SIZE, name_.data(), name_.size() ); +# else + strncpy( name, name_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, name_.size() ) ); +# endif + + VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( WIN32 ) + strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); +# else + strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); +# endif + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + PipelineExecutableStatisticKHR & operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -98663,6 +99599,16 @@ namespace VULKAN_HPP_NAMESPACE { } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + QueueFamilyGlobalPriorityPropertiesKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & priorities_, + void * pNext_ = nullptr ) + : pNext( pNext_ ), priorityCount( std::min( static_cast( priorities_.size() ), VK_MAX_GLOBAL_PRIORITY_SIZE_KHR ) ) + { + VULKAN_HPP_ASSERT( priorities_.size() < VK_MAX_GLOBAL_PRIORITY_SIZE_KHR ); + memcpy( priorities, priorities_.data(), priorityCount * sizeof( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + QueueFamilyGlobalPriorityPropertiesKHR & operator=( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -98672,27 +99618,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR & setPriorityCount( uint32_t priorityCount_ ) VULKAN_HPP_NOEXCEPT - { - priorityCount = 
priorityCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR & - setPriorities( std::array priorities_ ) VULKAN_HPP_NOEXCEPT - { - priorities = priorities_; - return *this; - } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkQueueFamilyGlobalPriorityPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -98719,22 +99644,34 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( QueueFamilyGlobalPriorityPropertiesKHR const & ) const = default; -#else + std::strong_ordering operator<=>( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = priorityCount <=> rhs.priorityCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < priorityCount; ++i ) + { + if ( auto cmp = priorities[i] <=> rhs.priorities[i]; cmp != 0 ) + return cmp; + } + + return std::strong_ordering::equivalent; + } +#endif + bool operator==( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priorityCount == rhs.priorityCount ) && ( priorities == rhs.priorities ); -# endif + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priorityCount == rhs.priorityCount ) && + ( memcmp( priorities, rhs.priorities, priorityCount * sizeof( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR ) ) == 0 ); } bool operator!=( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyGlobalPriorityPropertiesKHR; @@ -103263,6 +104200,21 @@ namespace VULKAN_HPP_NAMESPACE { } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassSubpassFeedbackInfoEXT( VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT subpassMergeStatus_, + std::string const & description_, + uint32_t postMergeIndex_ = {} ) + : subpassMergeStatus( subpassMergeStatus_ ), postMergeIndex( postMergeIndex_ ) + { + VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( WIN32 ) + strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); +# else + strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); +# endif + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + RenderPassSubpassFeedbackInfoEXT & operator=( RenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -103296,22 +104248,28 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RenderPassSubpassFeedbackInfoEXT const & ) const = default; -#else + std::strong_ordering operator<=>( RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = subpassMergeStatus <=> rhs.subpassMergeStatus; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = postMergeIndex <=> rhs.postMergeIndex; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + bool operator==( RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( subpassMergeStatus == rhs.subpassMergeStatus ) && ( description == rhs.description ) && ( postMergeIndex == rhs.postMergeIndex ); -# endif + return ( subpassMergeStatus == rhs.subpassMergeStatus ) && ( strcmp( description, rhs.description ) == 0 ) && ( postMergeIndex == rhs.postMergeIndex ); } bool operator!=( RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT subpassMergeStatus = VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT::eMerged; @@ -108092,6 +109050,15 @@ namespace VULKAN_HPP_NAMESPACE { } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & identifier_, void * pNext_ = nullptr ) + : pNext( pNext_ ), identifierSize( std::min( static_cast( identifier_.size() ), VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT ) ) + { + VULKAN_HPP_ASSERT( identifier_.size() < VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT ); + memcpy( identifier, identifier_.data(), identifierSize * sizeof( uint8_t ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + ShaderModuleIdentifierEXT & operator=( ShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -108127,22 +109094,34 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ShaderModuleIdentifierEXT const & ) const = default; -#else + std::strong_ordering operator<=>( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = identifierSize <=> rhs.identifierSize; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < identifierSize; ++i ) + { + if ( auto cmp = identifier[i] <=> rhs.identifier[i]; cmp != 0 ) + return cmp; + } + + return std::strong_ordering::equivalent; + } +#endif + bool operator==( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( identifierSize == rhs.identifierSize ) && ( identifier == rhs.identifier ); -# endif + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( identifierSize == rhs.identifierSize ) && + ( memcmp( identifier, rhs.identifier, identifierSize * sizeof( uint8_t ) ) == 0 ); } bool operator!=( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleIdentifierEXT; @@ -110063,6 +111042,26 @@ namespace VULKAN_HPP_NAMESPACE return *this; } +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassShadingPipelineCreateInfoHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassShadingPipelineCreateInfoHUAWEI & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT + { + renderPass 
= renderPass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassShadingPipelineCreateInfoHUAWEI & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT + { + subpass = subpass_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + operator VkSubpassShadingPipelineCreateInfoHUAWEI const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); diff --git a/vulkan/vulkan_to_string.hpp b/vulkan/vulkan_to_string.hpp index b7c08c7..cb3ae0d 100644 --- a/vulkan/vulkan_to_string.hpp +++ b/vulkan/vulkan_to_string.hpp @@ -382,9 +382,16 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } - VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags ) + VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags value ) { - return "{}"; + if ( !value ) + return "{}"; + + std::string result; + if ( value & MemoryMapFlagBits::ePlacedEXT ) + result += "PlacedEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value ) @@ -2810,9 +2817,16 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_map_memory2 === - VULKAN_HPP_INLINE std::string to_string( MemoryUnmapFlagsKHR ) + VULKAN_HPP_INLINE std::string to_string( MemoryUnmapFlagsKHR value ) { - return "{}"; + if ( !value ) + return "{}"; + + std::string result; + if ( value & MemoryUnmapFlagBitsKHR::eReserveEXT ) + result += "ReserveEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } //=== VK_EXT_surface_maintenance1 === @@ -3579,7 +3593,7 @@ namespace VULKAN_HPP_NAMESPACE case Result::eOperationNotDeferredKHR: return "OperationNotDeferredKHR"; case Result::eErrorInvalidVideoStdParametersKHR: return "ErrorInvalidVideoStdParametersKHR"; case Result::eErrorCompressionExhaustedEXT: return "ErrorCompressionExhaustedEXT"; - case Result::eErrorIncompatibleShaderBinaryEXT: return "ErrorIncompatibleShaderBinaryEXT"; + case Result::eIncompatibleShaderBinaryEXT: return "IncompatibleShaderBinaryEXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -4184,6 +4198,9 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eHostImageCopyDevicePerformanceQueryEXT: return "HostImageCopyDevicePerformanceQueryEXT"; case StructureType::eMemoryMapInfoKHR: return "MemoryMapInfoKHR"; case StructureType::eMemoryUnmapInfoKHR: return "MemoryUnmapInfoKHR"; + case StructureType::ePhysicalDeviceMapMemoryPlacedFeaturesEXT: return "PhysicalDeviceMapMemoryPlacedFeaturesEXT"; + case StructureType::ePhysicalDeviceMapMemoryPlacedPropertiesEXT: return "PhysicalDeviceMapMemoryPlacedPropertiesEXT"; + case StructureType::eMemoryMapPlacedInfoEXT: return "MemoryMapPlacedInfoEXT"; case StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT: return "PhysicalDeviceShaderAtomicFloat2FeaturesEXT"; case StructureType::eSurfacePresentModeEXT: return "SurfacePresentModeEXT"; case StructureType::eSurfacePresentScalingCapabilitiesEXT: return "SurfacePresentScalingCapabilitiesEXT"; @@ -4532,6 +4549,7 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eSetDescriptorBufferOffsetsInfoEXT: return "SetDescriptorBufferOffsetsInfoEXT"; case StructureType::eBindDescriptorBufferEmbeddedSamplersInfoEXT: return "BindDescriptorBufferEmbeddedSamplersInfoEXT"; case StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV: return "PhysicalDeviceDescriptorPoolOverallocationFeaturesNV"; + case StructureType::ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV: return 
"PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5151,9 +5169,13 @@ namespace VULKAN_HPP_NAMESPACE } } - VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits ) + VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits value ) { - return "(void)"; + switch ( value ) + { + case MemoryMapFlagBits::ePlacedEXT: return "PlacedEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } } VULKAN_HPP_INLINE std::string to_string( ImageAspectFlagBits value ) @@ -8111,9 +8133,13 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_map_memory2 === - VULKAN_HPP_INLINE std::string to_string( MemoryUnmapFlagBitsKHR ) + VULKAN_HPP_INLINE std::string to_string( MemoryUnmapFlagBitsKHR value ) { - return "(void)"; + switch ( value ) + { + case MemoryUnmapFlagBitsKHR::eReserveEXT: return "ReserveEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } } //=== VK_EXT_surface_maintenance1 === diff --git a/vulkan/vulkansc_structs.hpp b/vulkan/vulkansc_structs.hpp index 15844e3..939ec8b 100644 --- a/vulkan/vulkansc_structs.hpp +++ b/vulkan/vulkansc_structs.hpp @@ -17751,6 +17751,18 @@ namespace VULKAN_HPP_NAMESPACE { } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ExtensionProperties( std::string const & extensionName_, uint32_t specVersion_ = {} ) : specVersion( specVersion_ ) + { + VULKAN_HPP_ASSERT( extensionName_.size() < VK_MAX_EXTENSION_NAME_SIZE ); +# if defined( WIN32 ) + strncpy_s( extensionName, VK_MAX_EXTENSION_NAME_SIZE, extensionName_.data(), extensionName_.size() ); +# else + strncpy( extensionName, extensionName_.data(), std::min( VK_MAX_EXTENSION_NAME_SIZE, extensionName_.size() ) ); +# endif + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + ExtensionProperties & operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -17783,22 +17795,26 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExtensionProperties const & ) const = default; -#else + std::strong_ordering operator<=>( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = strcmp( extensionName, rhs.extensionName ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = specVersion <=> rhs.specVersion; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + bool operator==( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( extensionName == rhs.extensionName ) && ( specVersion == rhs.specVersion ); -# endif + return ( strcmp( extensionName, rhs.extensionName ) == 0 ) && ( specVersion == rhs.specVersion ); } bool operator!=( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::ArrayWrapper1D extensionName = {}; @@ -26364,6 +26380,26 @@ namespace VULKAN_HPP_NAMESPACE LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT : LayerProperties( *reinterpret_cast( &rhs ) ) {} +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + LayerProperties( std::string const & layerName_, uint32_t specVersion_ = {}, uint32_t implementationVersion_ = {}, std::string const & description_ = {} ) + : specVersion( specVersion_ ), implementationVersion( implementationVersion_ ) + { + VULKAN_HPP_ASSERT( layerName_.size() < VK_MAX_EXTENSION_NAME_SIZE ); +# if defined( WIN32 ) + strncpy_s( layerName, VK_MAX_EXTENSION_NAME_SIZE, layerName_.data(), layerName_.size() ); +# else + strncpy( layerName, layerName_.data(), std::min( VK_MAX_EXTENSION_NAME_SIZE, layerName_.size() ) ); +# endif + + VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( WIN32 ) + strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); +# else + strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); +# endif + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -26399,23 +26435,31 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( LayerProperties const & ) const = default; -#else + std::strong_ordering operator<=>( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = strcmp( layerName, rhs.layerName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = specVersion <=> rhs.specVersion; cmp != 0 ) + return cmp; + if ( auto cmp = implementationVersion <=> rhs.implementationVersion; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + bool operator==( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( layerName == rhs.layerName ) && ( specVersion == rhs.specVersion ) && ( implementationVersion == rhs.implementationVersion ) && - ( description == rhs.description ); -# endif + return ( strcmp( layerName, rhs.layerName ) == 0 ) && ( specVersion == rhs.specVersion ) && ( implementationVersion == rhs.implementationVersion ) && + ( strcmp( description, rhs.description ) == 0 ); } bool operator!=( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::ArrayWrapper1D layerName = {}; @@ -28037,6 +28081,37 @@ namespace VULKAN_HPP_NAMESPACE { } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PerformanceCounterDescriptionKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_, + std::string const & name_, + std::string const & category_ = {}, + std::string const & description_ = {}, + void * pNext_ = nullptr ) + : pNext( pNext_ ), flags( flags_ ) + { + VULKAN_HPP_ASSERT( name_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( WIN32 ) + strncpy_s( name, VK_MAX_DESCRIPTION_SIZE, name_.data(), name_.size() ); +# else + strncpy( name, name_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, name_.size() ) ); +# endif + + VULKAN_HPP_ASSERT( category_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( WIN32 ) + strncpy_s( category, VK_MAX_DESCRIPTION_SIZE, category_.data(), category_.size() ); +# else + strncpy( category, category_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, category_.size() ) ); +# endif + + VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( WIN32 ) + strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); +# else + strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); +# endif + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -28074,23 +28149,35 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PerformanceCounterDescriptionKHR const & ) const = default; -#else + std::strong_ordering operator<=>( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( category, rhs.category ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + bool operator==( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( name == rhs.name ) && ( category == rhs.category ) && - ( description == rhs.description ); -# endif + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( strcmp( name, rhs.name ) == 0 ) && + ( strcmp( category, rhs.category ) == 0 ) && ( strcmp( description, rhs.description ) == 0 ); } bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR; @@ -30587,6 +30674,30 @@ namespace VULKAN_HPP_NAMESPACE { } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceDriverProperties( VULKAN_HPP_NAMESPACE::DriverId driverID_, + std::string const & driverName_, + std::string const & driverInfo_ = {}, + VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, + void * pNext_ = nullptr ) + : pNext( pNext_ ), driverID( driverID_ ), conformanceVersion( conformanceVersion_ ) + { + VULKAN_HPP_ASSERT( driverName_.size() < VK_MAX_DRIVER_NAME_SIZE ); +# if defined( WIN32 ) + strncpy_s( driverName, VK_MAX_DRIVER_NAME_SIZE, driverName_.data(), driverName_.size() ); +# else + strncpy( driverName, driverName_.data(), std::min( VK_MAX_DRIVER_NAME_SIZE, driverName_.size() ) ); +# endif + + VULKAN_HPP_ASSERT( driverInfo_.size() < VK_MAX_DRIVER_INFO_SIZE ); +# if defined( WIN32 ) + strncpy_s( driverInfo, VK_MAX_DRIVER_INFO_SIZE, driverInfo_.data(), driverInfo_.size() ); +# else + strncpy( driverInfo, driverInfo_.data(), std::min( VK_MAX_DRIVER_INFO_SIZE, driverInfo_.size() ) ); +# endif + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + PhysicalDeviceDriverProperties & operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -30624,23 +30735,35 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceDriverProperties const & ) const = default; -#else + std::strong_ordering operator<=>( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = driverID <=> rhs.driverID; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( driverName, rhs.driverName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( driverInfo, rhs.driverInfo ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = conformanceVersion <=> rhs.conformanceVersion; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + bool operator==( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( driverName == rhs.driverName ) && - ( driverInfo == rhs.driverInfo ) && ( conformanceVersion == rhs.conformanceVersion ); -# endif + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( strcmp( driverName, rhs.driverName ) == 0 ) && + ( strcmp( driverInfo, rhs.driverInfo ) == 0 ) && ( conformanceVersion == rhs.conformanceVersion ); } bool operator!=( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties; @@ -32795,6 +32918,19 @@ namespace VULKAN_HPP_NAMESPACE { } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceGroupProperties( VULKAN_HPP_NAMESPACE::ArrayProxy const & physicalDevices_, + VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {}, + void * pNext_ = nullptr ) + : pNext( pNext_ ) + , physicalDeviceCount( std::min( static_cast( physicalDevices_.size() ), VK_MAX_DEVICE_GROUP_SIZE ) ) + , subsetAllocation( subsetAllocation_ ) + { + VULKAN_HPP_ASSERT( physicalDevices_.size() < VK_MAX_DEVICE_GROUP_SIZE ); + memcpy( physicalDevices, physicalDevices_.data(), physicalDeviceCount * sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -32831,23 +32967,37 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceGroupProperties const & ) const = default; -#else + std::strong_ordering operator<=>( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = physicalDeviceCount <=> rhs.physicalDeviceCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < physicalDeviceCount; ++i ) + { + if ( auto cmp = physicalDevices[i] <=> rhs.physicalDevices[i]; cmp != 0 ) + return cmp; + } + if ( auto cmp = subsetAllocation <=> rhs.subsetAllocation; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + bool operator==( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) && - ( physicalDevices == rhs.physicalDevices ) && ( subsetAllocation == rhs.subsetAllocation ); -# endif + ( memcmp( physicalDevices, rhs.physicalDevices, physicalDeviceCount * sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) ) == 0 ) && + ( subsetAllocation == rhs.subsetAllocation ); } bool operator!=( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceGroupProperties; @@ -34927,6 +35077,19 @@ namespace VULKAN_HPP_NAMESPACE { } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceMemoryProperties( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryTypes_, + VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryHeaps_ = {} ) + : memoryTypeCount( std::min( static_cast( memoryTypes_.size() ), VK_MAX_MEMORY_TYPES ) ) + , memoryHeapCount( std::min( static_cast( memoryHeaps_.size() ), VK_MAX_MEMORY_HEAPS ) ) + { + VULKAN_HPP_ASSERT( memoryTypes_.size() < VK_MAX_MEMORY_TYPES ); + memcpy( memoryTypes, memoryTypes_.data(), memoryTypeCount * sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) ); + VULKAN_HPP_ASSERT( memoryHeaps_.size() < VK_MAX_MEMORY_HEAPS ); + memcpy( memoryHeaps, memoryHeaps_.data(), memoryHeapCount * sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + PhysicalDeviceMemoryProperties & operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -34962,23 +35125,39 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMemoryProperties const & ) const = default; -#else + std::strong_ordering operator<=>( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = memoryTypeCount <=> rhs.memoryTypeCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < memoryTypeCount; ++i ) + { + if ( auto cmp = memoryTypes[i] <=> rhs.memoryTypes[i]; cmp != 0 ) + return cmp; + } + if ( auto cmp = memoryHeapCount <=> rhs.memoryHeapCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < memoryHeapCount; ++i ) + { + if ( auto cmp = memoryHeaps[i] <=> rhs.memoryHeaps[i]; cmp != 0 ) + return cmp; + } + + return std::strong_ordering::equivalent; + } +#endif + bool operator==( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( memoryTypeCount == rhs.memoryTypeCount ) && ( memoryTypes == rhs.memoryTypes ) && ( memoryHeapCount == rhs.memoryHeapCount ) && - ( memoryHeaps == rhs.memoryHeaps ); -# endif + return ( memoryTypeCount == rhs.memoryTypeCount ) && + ( memcmp( memoryTypes, rhs.memoryTypes, memoryTypeCount * sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) ) == 0 ) && + ( memoryHeapCount == rhs.memoryHeapCount ) && + ( memcmp( memoryHeaps, rhs.memoryHeaps, memoryHeapCount * sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) ) == 0 ); } bool operator!=( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: uint32_t memoryTypeCount = {}; @@ -35979,6 +36158,34 @@ namespace VULKAN_HPP_NAMESPACE { } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceProperties( uint32_t apiVersion_, + uint32_t driverVersion_, + uint32_t vendorID_, + uint32_t deviceID_, + VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_, + std::string const & deviceName_, + std::array const & pipelineCacheUUID_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) + : apiVersion( apiVersion_ ) + , driverVersion( driverVersion_ ) + , vendorID( vendorID_ ) + , deviceID( deviceID_ ) + , deviceType( deviceType_ ) + , pipelineCacheUUID( pipelineCacheUUID_ ) + , limits( limits_ ) + , sparseProperties( sparseProperties_ ) + { + VULKAN_HPP_ASSERT( deviceName_.size() < 
VK_MAX_PHYSICAL_DEVICE_NAME_SIZE ); +# if defined( WIN32 ) + strncpy_s( deviceName, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE, deviceName_.data(), deviceName_.size() ); +# else + strncpy( deviceName, deviceName_.data(), std::min( VK_MAX_PHYSICAL_DEVICE_NAME_SIZE, deviceName_.size() ) ); +# endif + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -36019,24 +36226,42 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceProperties const & ) const = default; -#else + std::partial_ordering operator<=>( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = apiVersion <=> rhs.apiVersion; cmp != 0 ) + return cmp; + if ( auto cmp = driverVersion <=> rhs.driverVersion; cmp != 0 ) + return cmp; + if ( auto cmp = vendorID <=> rhs.vendorID; cmp != 0 ) + return cmp; + if ( auto cmp = deviceID <=> rhs.deviceID; cmp != 0 ) + return cmp; + if ( auto cmp = deviceType <=> rhs.deviceType; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( deviceName, rhs.deviceName ); cmp != 0 ) + return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater; + if ( auto cmp = pipelineCacheUUID <=> rhs.pipelineCacheUUID; cmp != 0 ) + return cmp; + if ( auto cmp = limits <=> rhs.limits; cmp != 0 ) + return cmp; + if ( auto cmp = sparseProperties <=> rhs.sparseProperties; cmp != 0 ) + return cmp; + + return std::partial_ordering::equivalent; + } +#endif + bool operator==( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else return ( apiVersion == rhs.apiVersion ) && ( driverVersion == rhs.driverVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && - ( deviceType == rhs.deviceType ) && ( deviceName == rhs.deviceName ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) && + ( deviceType == rhs.deviceType ) && ( strcmp( deviceName, rhs.deviceName ) == 0 ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) && ( limits == rhs.limits ) && ( sparseProperties == rhs.sparseProperties ); -# endif } bool operator!=( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: uint32_t apiVersion = {}; @@ -39506,6 +39731,45 @@ namespace VULKAN_HPP_NAMESPACE { } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceToolProperties( std::string const & name_, + std::string const & version_ = {}, + VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes_ = {}, + std::string const & description_ = {}, + std::string const & layer_ = {}, + void * pNext_ = nullptr ) + : pNext( pNext_ ), purposes( purposes_ ) + { + VULKAN_HPP_ASSERT( name_.size() < VK_MAX_EXTENSION_NAME_SIZE ); +# if defined( WIN32 ) + strncpy_s( name, VK_MAX_EXTENSION_NAME_SIZE, name_.data(), name_.size() ); +# else + strncpy( name, name_.data(), std::min( VK_MAX_EXTENSION_NAME_SIZE, name_.size() ) ); +# endif + + VULKAN_HPP_ASSERT( version_.size() < VK_MAX_EXTENSION_NAME_SIZE ); +# if defined( WIN32 ) + strncpy_s( version, VK_MAX_EXTENSION_NAME_SIZE, version_.data(), version_.size() ); +# else + strncpy( version, version_.data(), std::min( VK_MAX_EXTENSION_NAME_SIZE, version_.size() ) ); +# endif + + VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( WIN32 ) + strncpy_s( description, 
VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); +# else + strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); +# endif + + VULKAN_HPP_ASSERT( layer_.size() < VK_MAX_EXTENSION_NAME_SIZE ); +# if defined( WIN32 ) + strncpy_s( layer, VK_MAX_EXTENSION_NAME_SIZE, layer_.data(), layer_.size() ); +# else + strncpy( layer, layer_.data(), std::min( VK_MAX_EXTENSION_NAME_SIZE, layer_.size() ) ); +# endif + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + PhysicalDeviceToolProperties & operator=( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -39544,23 +39808,37 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceToolProperties const & ) const = default; -#else + std::strong_ordering operator<=>( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( version, rhs.version ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = purposes <=> rhs.purposes; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( layer, rhs.layer ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + bool operator==( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( name == rhs.name ) && ( version == rhs.version ) && ( purposes == rhs.purposes ) && - ( description == rhs.description ) && ( layer == rhs.layer ); -# endif + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( strcmp( name, rhs.name ) == 0 ) && ( strcmp( version, rhs.version ) == 0 ) && + ( purposes == rhs.purposes ) && ( strcmp( description, rhs.description ) == 0 ) && ( strcmp( layer, rhs.layer ) == 0 ); } bool operator!=( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolProperties; @@ -41176,6 +41454,129 @@ namespace VULKAN_HPP_NAMESPACE { } +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceVulkan12Properties( + VULKAN_HPP_NAMESPACE::DriverId driverID_, + std::string const & driverName_, + std::string const & driverInfo_ = {}, + VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 
shaderSignedZeroInfNanPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, + uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, + uint32_t maxPerStageUpdateAfterBindResources_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, + uint64_t maxTimelineSemaphoreValueDifference_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {}, + void * pNext_ = nullptr ) + : pNext( pNext_ ) + , driverID( driverID_ ) + , conformanceVersion( conformanceVersion_ ) + , denormBehaviorIndependence( denormBehaviorIndependence_ ) + , roundingModeIndependence( roundingModeIndependence_ ) + , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ) + , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ) + , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ) + , shaderDenormPreserveFloat16( 
shaderDenormPreserveFloat16_ ) + , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ) + , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ) + , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ) + , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ) + , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ) + , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ) + , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ) + , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ) + , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ) + , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ) + , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ) + , maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ) + , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ) + , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ) + , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ) + , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ) + , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ) + , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ) + , quadDivergentImplicitLod( quadDivergentImplicitLod_ ) + , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ) + , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ) + , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ) + , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ) + , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ) + , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ) + , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ) + , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ) + , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ) + , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ) + , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ) + , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ) + , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ) + , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ) + , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ) + , supportedDepthResolveModes( supportedDepthResolveModes_ ) + , supportedStencilResolveModes( supportedStencilResolveModes_ ) + , independentResolveNone( independentResolveNone_ ) + , independentResolve( independentResolve_ ) + , filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ) + , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ ) + , maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ) + , 
framebufferIntegerColorSampleCounts( framebufferIntegerColorSampleCounts_ ) + { + VULKAN_HPP_ASSERT( driverName_.size() < VK_MAX_DRIVER_NAME_SIZE ); +# if defined( WIN32 ) + strncpy_s( driverName, VK_MAX_DRIVER_NAME_SIZE, driverName_.data(), driverName_.size() ); +# else + strncpy( driverName, driverName_.data(), std::min( VK_MAX_DRIVER_NAME_SIZE, driverName_.size() ) ); +# endif + + VULKAN_HPP_ASSERT( driverInfo_.size() < VK_MAX_DRIVER_INFO_SIZE ); +# if defined( WIN32 ) + strncpy_s( driverInfo, VK_MAX_DRIVER_INFO_SIZE, driverInfo_.data(), driverInfo_.size() ); +# else + strncpy( driverInfo, driverInfo_.data(), std::min( VK_MAX_DRIVER_INFO_SIZE, driverInfo_.size() ) ); +# endif + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + PhysicalDeviceVulkan12Properties & operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -41314,15 +41715,125 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceVulkan12Properties const & ) const = default; -#else + std::strong_ordering operator<=>( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = driverID <=> rhs.driverID; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( driverName, rhs.driverName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( driverInfo, rhs.driverInfo ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = conformanceVersion <=> rhs.conformanceVersion; cmp != 0 ) + return cmp; + if ( auto cmp = denormBehaviorIndependence <=> rhs.denormBehaviorIndependence; cmp != 0 ) + return cmp; + if ( auto cmp = roundingModeIndependence <=> rhs.roundingModeIndependence; cmp != 0 ) + return cmp; + if ( auto cmp = shaderSignedZeroInfNanPreserveFloat16 <=> rhs.shaderSignedZeroInfNanPreserveFloat16; cmp != 0 ) + return cmp; + if ( auto cmp = shaderSignedZeroInfNanPreserveFloat32 <=> rhs.shaderSignedZeroInfNanPreserveFloat32; cmp != 0 ) + return cmp; + if ( auto cmp = shaderSignedZeroInfNanPreserveFloat64 <=> rhs.shaderSignedZeroInfNanPreserveFloat64; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormPreserveFloat16 <=> rhs.shaderDenormPreserveFloat16; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormPreserveFloat32 <=> rhs.shaderDenormPreserveFloat32; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormPreserveFloat64 <=> rhs.shaderDenormPreserveFloat64; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormFlushToZeroFloat16 <=> rhs.shaderDenormFlushToZeroFloat16; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormFlushToZeroFloat32 <=> rhs.shaderDenormFlushToZeroFloat32; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormFlushToZeroFloat64 <=> rhs.shaderDenormFlushToZeroFloat64; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTEFloat16 <=> rhs.shaderRoundingModeRTEFloat16; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTEFloat32 <=> rhs.shaderRoundingModeRTEFloat32; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTEFloat64 <=> rhs.shaderRoundingModeRTEFloat64; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTZFloat16 <=> rhs.shaderRoundingModeRTZFloat16; cmp != 0 ) + return cmp; + if ( auto cmp = 
shaderRoundingModeRTZFloat32 <=> rhs.shaderRoundingModeRTZFloat32; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTZFloat64 <=> rhs.shaderRoundingModeRTZFloat64; cmp != 0 ) + return cmp; + if ( auto cmp = maxUpdateAfterBindDescriptorsInAllPools <=> rhs.maxUpdateAfterBindDescriptorsInAllPools; cmp != 0 ) + return cmp; + if ( auto cmp = shaderUniformBufferArrayNonUniformIndexingNative <=> rhs.shaderUniformBufferArrayNonUniformIndexingNative; cmp != 0 ) + return cmp; + if ( auto cmp = shaderSampledImageArrayNonUniformIndexingNative <=> rhs.shaderSampledImageArrayNonUniformIndexingNative; cmp != 0 ) + return cmp; + if ( auto cmp = shaderStorageBufferArrayNonUniformIndexingNative <=> rhs.shaderStorageBufferArrayNonUniformIndexingNative; cmp != 0 ) + return cmp; + if ( auto cmp = shaderStorageImageArrayNonUniformIndexingNative <=> rhs.shaderStorageImageArrayNonUniformIndexingNative; cmp != 0 ) + return cmp; + if ( auto cmp = shaderInputAttachmentArrayNonUniformIndexingNative <=> rhs.shaderInputAttachmentArrayNonUniformIndexingNative; cmp != 0 ) + return cmp; + if ( auto cmp = robustBufferAccessUpdateAfterBind <=> rhs.robustBufferAccessUpdateAfterBind; cmp != 0 ) + return cmp; + if ( auto cmp = quadDivergentImplicitLod <=> rhs.quadDivergentImplicitLod; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindSamplers <=> rhs.maxPerStageDescriptorUpdateAfterBindSamplers; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindUniformBuffers <=> rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindStorageBuffers <=> rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindSampledImages <=> rhs.maxPerStageDescriptorUpdateAfterBindSampledImages; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindStorageImages <=> rhs.maxPerStageDescriptorUpdateAfterBindStorageImages; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindInputAttachments <=> rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageUpdateAfterBindResources <=> rhs.maxPerStageUpdateAfterBindResources; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindSamplers <=> rhs.maxDescriptorSetUpdateAfterBindSamplers; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindUniformBuffers <=> rhs.maxDescriptorSetUpdateAfterBindUniformBuffers; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindUniformBuffersDynamic <=> rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageBuffers <=> rhs.maxDescriptorSetUpdateAfterBindStorageBuffers; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageBuffersDynamic <=> rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindSampledImages <=> rhs.maxDescriptorSetUpdateAfterBindSampledImages; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageImages <=> rhs.maxDescriptorSetUpdateAfterBindStorageImages; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindInputAttachments <=> rhs.maxDescriptorSetUpdateAfterBindInputAttachments; cmp != 0 ) + return cmp; + if ( auto cmp = supportedDepthResolveModes 
<=> rhs.supportedDepthResolveModes; cmp != 0 ) + return cmp; + if ( auto cmp = supportedStencilResolveModes <=> rhs.supportedStencilResolveModes; cmp != 0 ) + return cmp; + if ( auto cmp = independentResolveNone <=> rhs.independentResolveNone; cmp != 0 ) + return cmp; + if ( auto cmp = independentResolve <=> rhs.independentResolve; cmp != 0 ) + return cmp; + if ( auto cmp = filterMinmaxSingleComponentFormats <=> rhs.filterMinmaxSingleComponentFormats; cmp != 0 ) + return cmp; + if ( auto cmp = filterMinmaxImageComponentMapping <=> rhs.filterMinmaxImageComponentMapping; cmp != 0 ) + return cmp; + if ( auto cmp = maxTimelineSemaphoreValueDifference <=> rhs.maxTimelineSemaphoreValueDifference; cmp != 0 ) + return cmp; + if ( auto cmp = framebufferIntegerColorSampleCounts <=> rhs.framebufferIntegerColorSampleCounts; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + bool operator==( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( driverName == rhs.driverName ) && - ( driverInfo == rhs.driverInfo ) && ( conformanceVersion == rhs.conformanceVersion ) && + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( strcmp( driverName, rhs.driverName ) == 0 ) && + ( strcmp( driverInfo, rhs.driverInfo ) == 0 ) && ( conformanceVersion == rhs.conformanceVersion ) && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && ( roundingModeIndependence == rhs.roundingModeIndependence ) && ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) && ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) && @@ -41362,14 +41873,12 @@ namespace VULKAN_HPP_NAMESPACE ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ) && ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ) && ( framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts ); -# endif } bool operator!=( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties;
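
Editor's note on the recurring pattern in the generated hunks above (not part of the patch itself): the enhanced-mode constructors accept std::string / ArrayProxy arguments and copy them into the structs' fixed-size C arrays with a WIN32-guarded strncpy_s / strncpy or a count-bounded memcpy, while the previously defaulted operator<=> and reflection-based operator== are replaced by hand-written comparisons that use strcmp for char arrays and element-wise or memcmp comparison over only the first `count` entries of counted arrays. The following is a minimal, self-contained sketch of that pattern on a hypothetical ToyProperties struct; the names, sizes, and values are invented for illustration and do not appear in the patch.

    #include <algorithm>
    #include <cassert>
    #include <compare>
    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <initializer_list>
    #include <string>

    struct ToyProperties
    {
      static constexpr std::size_t MaxNameSize  = 16;  // stands in for VK_MAX_EXTENSION_NAME_SIZE
      static constexpr std::size_t MaxValueSize = 4;   // stands in for a VK_MAX_*_SIZE array bound

      ToyProperties( std::string const & name_, std::initializer_list<std::uint32_t> values_ )
        : valueCount( static_cast<std::uint32_t>( std::min( values_.size(), MaxValueSize ) ) )
      {
        // same precondition the patch asserts before copying; `name` is zero-initialized,
        // so copying fewer than MaxNameSize bytes keeps it null-terminated
        assert( name_.size() < MaxNameSize );
    #if defined( WIN32 )
        strncpy_s( name, MaxNameSize, name_.data(), name_.size() );
    #else
        std::strncpy( name, name_.data(), std::min( MaxNameSize - 1, name_.size() ) );
    #endif
        // copy only the clamped element count, as the ArrayProxy-based constructors above do
        std::memcpy( values, values_.begin(), valueCount * sizeof( std::uint32_t ) );
      }

      std::strong_ordering operator<=>( ToyProperties const & rhs ) const noexcept
      {
        // compare the char array by its string content, not byte-for-byte
        if ( auto cmp = std::strcmp( name, rhs.name ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
        if ( auto cmp = valueCount <=> rhs.valueCount; cmp != 0 )
          return cmp;
        for ( std::size_t i = 0; i < valueCount; ++i )  // only the first valueCount entries matter
        {
          if ( auto cmp = values[i] <=> rhs.values[i]; cmp != 0 )
            return cmp;
        }
        return std::strong_ordering::equivalent;
      }

      bool operator==( ToyProperties const & rhs ) const noexcept
      {
        return ( std::strcmp( name, rhs.name ) == 0 ) && ( valueCount == rhs.valueCount ) &&
               ( std::memcmp( values, rhs.values, valueCount * sizeof( std::uint32_t ) ) == 0 );
      }

      char          name[MaxNameSize]    = {};
      std::uint32_t valueCount           = {};
      std::uint32_t values[MaxValueSize] = {};
    };

    // Usage: two instances built from string/list arguments compare equal; only the
    // first valueCount entries of `values` participate in the comparison.
    //   ToyProperties a( "toy_extension", { 1, 2 } );
    //   ToyProperties b( "toy_extension", { 1, 2 } );
    //   assert( a == b && ( a <=> b ) == std::strong_ordering::equivalent );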
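
Editor's note on the vulkan_to_string.hpp hunks above (illustration only, not part of the patch): the new MemoryMapFlagBits / MemoryUnmapFlagBitsKHR overloads replace the former placeholder strings with the actual flag names, and the flags overloads now format set bits inside braces. Assuming the conventional `vk` namespace alias for VULKAN_HPP_NAMESPACE, the outputs follow directly from those hunks:

    vk::to_string( vk::MemoryMapFlagBits::ePlacedEXT );                        // "PlacedEXT"
    vk::to_string( vk::MemoryMapFlags( vk::MemoryMapFlagBits::ePlacedEXT ) );  // "{ PlacedEXT }"
    vk::to_string( vk::MemoryMapFlags() );                                     // "{}"
    vk::to_string( vk::MemoryUnmapFlagBitsKHR::eReserveEXT );                  // "ReserveEXT"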