Mirror of https://github.com/KhronosGroup/Vulkan-Hpp.git, synced 2024-10-14 16:32:17 +00:00
Update Vulkan-Headers to v1.3.278 (#1810)
This commit is contained in:
parent 4c130b9051
commit 29723f90a1
@@ -1 +1 @@
-Subproject commit 5a5c9a643484d888873e32c5d7d484fae8e71d3d
+Subproject commit 31aa7f634b052d87ede4664053e85f3f4d1d50d3
@ -10720,15 +10720,35 @@ std::string VulkanHppGenerator::generateStructConstructorsEnhanced( std::pair<st
else
{
assert( mit->arraySizes.size() == 1 );
static const std::string copyOpsTemplate = R"(
if ( mit->lenExpressions[0] == "null-terminated" )
{
static const std::string strcpyTemplate = R"(
VULKAN_HPP_ASSERT( ${memberName}_.size() < ${arraySize} );
${copyOp}( ${memberName}, ${memberSize}, ${memberName}_.data(), ${memberName}_.size() );)";
#if defined( WIN32 )
strncpy_s( ${memberName}, ${memberSize}, ${memberName}_.data(), ${memberName}_.size() );
#else
strncpy( ${memberName}, ${memberName}_.data(), std::min<size_t>( ${memberSize}, ${memberName}_.size() ) );
#endif
)";

copyOps += replaceWithMap( copyOpsTemplate,
{ { "arraySize", mit->arraySizes[0] },
{ "copyOp", mit->lenExpressions[0] == "null-terminated" ? "strncpy_s" : "memcpy_s" },
{ "memberName", mit->name },
{ "memberSize", mit->arraySizes[0] } } );
copyOps +=
replaceWithMap( strcpyTemplate, { { "arraySize", mit->arraySizes[0] }, { "memberName", mit->name }, { "memberSize", mit->arraySizes[0] } } );
}
else
{
static const std::string memcpyTemplate = R"(
VULKAN_HPP_ASSERT( ${memberName}_.size() < ${arraySize} );
memcpy( ${memberName}, ${memberName}_.data(), ${lenExpression} * sizeof( ${argumentType} ) );)";

std::string arraySizeExpression = ( mit->lenExpressions[0] == "null-terminated" )
? ( "std::min<size_t>( " + mit->name + "_.size(), " + mit->arraySizes[0] + " )" )
: ( mit->lenExpressions[0] + " * sizeof( " + argumentType + " )" );
copyOps += replaceWithMap( memcpyTemplate,
{ { "arraySize", mit->arraySizes[0] },
{ "argumentType", argumentType },
{ "lenExpression", mit->lenExpressions[0] },
{ "memberName", mit->name } } );
}
}
}
else
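Note on the two copy routines used by the strcpyTemplate above: strncpy_s receives the destination size as an explicit argument and therefore cannot overrun the member array, while plain strncpy has no such parameter, which is why the non-Windows branch clamps the count with std::min. A minimal, self-contained sketch of that difference (the buffer name and size are illustrative, not taken from this diff):

#include <algorithm>
#include <cassert>
#include <cstring>
#include <string>

void copyName( std::string const & value )
{
  char name[16] = {};
  assert( value.size() < sizeof( name ) );  // mirrors the VULKAN_HPP_ASSERT emitted by the template
#if defined( WIN32 )
  // bounded copy: the destination size is passed explicitly
  strncpy_s( name, sizeof( name ), value.data(), value.size() );
#else
  // strncpy has no destination-size parameter, so the count is clamped by hand
  strncpy( name, value.data(), std::min<size_t>( sizeof( name ), value.size() ) );
#endif
}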
@ -11447,7 +11467,11 @@ std::string VulkanHppGenerator::generateStructSetter( std::string const & struct
${structureName} & set${ArrayName}( std::string const & ${arrayName}_ ) VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( ${arrayName}_.size() < ${arraySize} );
#if defined( WIN32 )
strncpy_s( ${arrayName}, ${arraySize}, ${arrayName}_.data(), ${arrayName}_.size() );
#else
strncpy( ${arrayName}, ${arrayName}_.data(), std::min<size_t>( ${arraySize}, ${arrayName}_.size() ) );
#endif
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
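For callers, the setter template above expands into a member function that fills a fixed-size char-array member from a std::string. A hedged illustration of roughly what gets generated, using a hypothetical struct and member name that are not part of this diff:

#include <algorithm>
#include <cassert>
#include <cstring>
#include <string>

struct Sample  // stand-in for a generated struct with a "char name[16]" member
{
  char name[16] = {};

  Sample & setName( std::string const & name_ ) noexcept
  {
    assert( name_.size() < 16 );
#if defined( WIN32 )
    strncpy_s( name, 16, name_.data(), name_.size() );
#else
    strncpy( name, name_.data(), std::min<size_t>( 16, name_.size() ) );
#endif
    return *this;
  }
};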
@ -905,7 +905,6 @@ export namespace VULKAN_HPP_NAMESPACE
|
||||
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
|
||||
|
||||
using VULKAN_HPP_NAMESPACE::CompressionExhaustedEXTError;
|
||||
using VULKAN_HPP_NAMESPACE::IncompatibleShaderBinaryEXTError;
|
||||
using VULKAN_HPP_NAMESPACE::InvalidVideoStdParametersKHRError;
|
||||
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
|
||||
|
||||
@ -1873,6 +1872,10 @@ export namespace VULKAN_HPP_NAMESPACE
|
||||
using VULKAN_HPP_NAMESPACE::KHRMapMemory2ExtensionName;
|
||||
using VULKAN_HPP_NAMESPACE::KHRMapMemory2SpecVersion;
|
||||
|
||||
//=== VK_EXT_map_memory_placed ===
|
||||
using VULKAN_HPP_NAMESPACE::EXTMapMemoryPlacedExtensionName;
|
||||
using VULKAN_HPP_NAMESPACE::EXTMapMemoryPlacedSpecVersion;
|
||||
|
||||
//=== VK_EXT_shader_atomic_float2 ===
|
||||
using VULKAN_HPP_NAMESPACE::EXTShaderAtomicFloat2ExtensionName;
|
||||
using VULKAN_HPP_NAMESPACE::EXTShaderAtomicFloat2SpecVersion;
|
||||
@ -2491,6 +2494,10 @@ export namespace VULKAN_HPP_NAMESPACE
|
||||
using VULKAN_HPP_NAMESPACE::NVDescriptorPoolOverallocationExtensionName;
|
||||
using VULKAN_HPP_NAMESPACE::NVDescriptorPoolOverallocationSpecVersion;
|
||||
|
||||
//=== VK_NV_shader_atomic_float16_vector ===
|
||||
using VULKAN_HPP_NAMESPACE::NVShaderAtomicFloat16VectorExtensionName;
|
||||
using VULKAN_HPP_NAMESPACE::NVShaderAtomicFloat16VectorSpecVersion;
|
||||
|
||||
//========================
|
||||
//=== CONSTEXPR VALUEs ===
|
||||
//========================
|
||||
@ -3671,6 +3678,11 @@ export namespace VULKAN_HPP_NAMESPACE
|
||||
using VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR;
|
||||
using VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR;
|
||||
|
||||
//=== VK_EXT_map_memory_placed ===
|
||||
using VULKAN_HPP_NAMESPACE::MemoryMapPlacedInfoEXT;
|
||||
using VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedFeaturesEXT;
|
||||
using VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedPropertiesEXT;
|
||||
|
||||
//=== VK_EXT_shader_atomic_float2 ===
|
||||
using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT;
|
||||
|
||||
@ -4323,6 +4335,9 @@ export namespace VULKAN_HPP_NAMESPACE
|
||||
//=== VK_NV_descriptor_pool_overallocation ===
|
||||
using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV;
|
||||
|
||||
//=== VK_NV_shader_atomic_float16_vector ===
|
||||
using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV;
|
||||
|
||||
//===============
|
||||
//=== HANDLEs ===
|
||||
//===============
|
||||
|
@ -56,7 +56,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h
# include <span>
#endif

static_assert( VK_HEADER_VERSION == 277, "Wrong VK_HEADER_VERSION!" );
static_assert( VK_HEADER_VERSION == 278, "Wrong VK_HEADER_VERSION!" );

// <tuple> includes <sys/sysmacros.h> through some other header
// this results in major(x) being resolved to gnu_dev_major(x)
@ -6549,14 +6549,6 @@ namespace VULKAN_HPP_NAMESPACE
CompressionExhaustedEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorCompressionExhaustedEXT ), message ) {}
};

class IncompatibleShaderBinaryEXTError : public SystemError
{
public:
IncompatibleShaderBinaryEXTError( std::string const & message ) : SystemError( make_error_code( Result::eErrorIncompatibleShaderBinaryEXT ), message ) {}

IncompatibleShaderBinaryEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorIncompatibleShaderBinaryEXT ), message ) {}
};

namespace detail
{
[[noreturn]] VULKAN_HPP_INLINE void throwResultException( Result result, char const * message )
@ -6599,7 +6591,6 @@ namespace VULKAN_HPP_NAMESPACE
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
case Result::eErrorInvalidVideoStdParametersKHR: throw InvalidVideoStdParametersKHRError( message );
case Result::eErrorCompressionExhaustedEXT: throw CompressionExhaustedEXTError( message );
case Result::eErrorIncompatibleShaderBinaryEXT: throw IncompatibleShaderBinaryEXTError( message );
default: throw SystemError( make_error_code( result ), message );
}
}
@ -8015,6 +8006,10 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
VULKAN_HPP_CONSTEXPR_INLINE auto KHRMapMemory2ExtensionName = VK_KHR_MAP_MEMORY_2_EXTENSION_NAME;
|
||||
VULKAN_HPP_CONSTEXPR_INLINE auto KHRMapMemory2SpecVersion = VK_KHR_MAP_MEMORY_2_SPEC_VERSION;
|
||||
|
||||
//=== VK_EXT_map_memory_placed ===
|
||||
VULKAN_HPP_CONSTEXPR_INLINE auto EXTMapMemoryPlacedExtensionName = VK_EXT_MAP_MEMORY_PLACED_EXTENSION_NAME;
|
||||
VULKAN_HPP_CONSTEXPR_INLINE auto EXTMapMemoryPlacedSpecVersion = VK_EXT_MAP_MEMORY_PLACED_SPEC_VERSION;
|
||||
|
||||
//=== VK_EXT_shader_atomic_float2 ===
|
||||
VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderAtomicFloat2ExtensionName = VK_EXT_SHADER_ATOMIC_FLOAT_2_EXTENSION_NAME;
|
||||
VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderAtomicFloat2SpecVersion = VK_EXT_SHADER_ATOMIC_FLOAT_2_SPEC_VERSION;
|
||||
@ -8667,6 +8662,10 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
VULKAN_HPP_CONSTEXPR_INLINE auto NVDescriptorPoolOverallocationExtensionName = VK_NV_DESCRIPTOR_POOL_OVERALLOCATION_EXTENSION_NAME;
|
||||
VULKAN_HPP_CONSTEXPR_INLINE auto NVDescriptorPoolOverallocationSpecVersion = VK_NV_DESCRIPTOR_POOL_OVERALLOCATION_SPEC_VERSION;
|
||||
|
||||
//=== VK_NV_shader_atomic_float16_vector ===
|
||||
VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderAtomicFloat16VectorExtensionName = VK_NV_SHADER_ATOMIC_FLOAT16_VECTOR_EXTENSION_NAME;
|
||||
VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderAtomicFloat16VectorSpecVersion = VK_NV_SHADER_ATOMIC_FLOAT16_VECTOR_SPEC_VERSION;
|
||||
|
||||
} // namespace VULKAN_HPP_NAMESPACE
|
||||
|
||||
// clang-format off
|
||||
@ -12833,6 +12832,43 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
};
|
||||
};
|
||||
|
||||
//=== VK_EXT_map_memory_placed ===
|
||||
template <>
|
||||
struct StructExtends<PhysicalDeviceMapMemoryPlacedFeaturesEXT, PhysicalDeviceFeatures2>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
|
||||
template <>
|
||||
struct StructExtends<PhysicalDeviceMapMemoryPlacedFeaturesEXT, DeviceCreateInfo>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
|
||||
template <>
|
||||
struct StructExtends<PhysicalDeviceMapMemoryPlacedPropertiesEXT, PhysicalDeviceProperties2>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
|
||||
template <>
|
||||
struct StructExtends<MemoryMapPlacedInfoEXT, MemoryMapInfoKHR>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
|
||||
//=== VK_EXT_shader_atomic_float2 ===
|
||||
template <>
|
||||
struct StructExtends<PhysicalDeviceShaderAtomicFloat2FeaturesEXT, PhysicalDeviceFeatures2>
|
||||
@ -16552,6 +16588,25 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
};
|
||||
};
|
||||
|
||||
//=== VK_NV_shader_atomic_float16_vector ===
|
||||
template <>
|
||||
struct StructExtends<PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV, PhysicalDeviceFeatures2>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
|
||||
template <>
|
||||
struct StructExtends<PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV, DeviceCreateInfo>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
|
||||
#endif // VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
|
||||
#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
|
||||
|
@ -285,6 +285,7 @@ namespace VULKAN_HPP_NAMESPACE
eErrorPipelineCompileRequiredEXT = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT,
eErrorInvalidVideoStdParametersKHR = VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR,
eErrorCompressionExhaustedEXT = VK_ERROR_COMPRESSION_EXHAUSTED_EXT,
eIncompatibleShaderBinaryEXT = VK_INCOMPATIBLE_SHADER_BINARY_EXT,
eErrorIncompatibleShaderBinaryEXT = VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT
};
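The new eIncompatibleShaderBinaryEXT entry mirrors the C headers, where VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT appears to be kept only as an alias of the non-error code VK_INCOMPATIBLE_SHADER_BINARY_EXT; that is also why the matching exception class and throw case are dropped in the vulkan.hpp hunks further up. A hedged caller-side check:

// handle the (non-fatal) incompatible-shader-binary outcome explicitly
if ( result == vk::Result::eIncompatibleShaderBinaryEXT )
{
  // the provided binary was rejected by this driver; recreate the shader from SPIR-V instead
}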
@ -1027,6 +1028,9 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
eHostImageCopyDevicePerformanceQueryEXT = VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT,
|
||||
eMemoryMapInfoKHR = VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR,
|
||||
eMemoryUnmapInfoKHR = VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR,
|
||||
ePhysicalDeviceMapMemoryPlacedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_FEATURES_EXT,
|
||||
ePhysicalDeviceMapMemoryPlacedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_PROPERTIES_EXT,
|
||||
eMemoryMapPlacedInfoEXT = VK_STRUCTURE_TYPE_MEMORY_MAP_PLACED_INFO_EXT,
|
||||
ePhysicalDeviceShaderAtomicFloat2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT,
|
||||
eSurfacePresentModeEXT = VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT,
|
||||
eSurfacePresentScalingCapabilitiesEXT = VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT,
|
||||
@ -1417,7 +1421,8 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
ePushDescriptorSetWithTemplateInfoKHR = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO_KHR,
|
||||
eSetDescriptorBufferOffsetsInfoEXT = VK_STRUCTURE_TYPE_SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT,
|
||||
eBindDescriptorBufferEmbeddedSamplersInfoEXT = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT,
|
||||
ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV
|
||||
ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV,
|
||||
ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV
|
||||
};
|
||||
|
||||
enum class PipelineCacheHeaderVersion
|
||||
@ -2185,6 +2190,7 @@ namespace VULKAN_HPP_NAMESPACE

enum class MemoryMapFlagBits : VkMemoryMapFlags
{
ePlacedEXT = VK_MEMORY_MAP_PLACED_BIT_EXT
};

using MemoryMapFlags = Flags<MemoryMapFlagBits>;
@ -2193,7 +2199,7 @@ namespace VULKAN_HPP_NAMESPACE
struct FlagTraits<MemoryMapFlagBits>
{
static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryMapFlags allFlags = {};
static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryMapFlags allFlags = MemoryMapFlagBits::ePlacedEXT;
};

enum class ImageAspectFlagBits : VkImageAspectFlags
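The non-empty allFlags above corresponds to the new placed-map bit from VK_EXT_map_memory_placed. A hedged sketch of how the flag is meant to be used (assumes a vk::Device device, a vk::DeviceMemory memory, a page-aligned reservation placedAddress obtained elsewhere, and the memoryMapPlaced feature enabled):

vk::MemoryMapPlacedInfoEXT placedInfo{};
placedInfo.pPlacedAddress = placedAddress;

vk::MemoryMapInfoKHR mapInfo{};
mapInfo.flags  = vk::MemoryMapFlagBits::ePlacedEXT;  // the bit now included in allFlags
mapInfo.memory = memory;
mapInfo.offset = 0;
mapInfo.size   = VK_WHOLE_SIZE;
mapInfo.pNext  = &placedInfo;                        // allowed by StructExtends<MemoryMapPlacedInfoEXT, MemoryMapInfoKHR>

void * data = device.mapMemory2KHR( mapInfo );       // with exceptions enabled, returns the mapping at placedAddress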
@ -6078,6 +6084,7 @@ namespace VULKAN_HPP_NAMESPACE

enum class MemoryUnmapFlagBitsKHR : VkMemoryUnmapFlagsKHR
{
eReserveEXT = VK_MEMORY_UNMAP_RESERVE_BIT_EXT
};

using MemoryUnmapFlagsKHR = Flags<MemoryUnmapFlagBitsKHR>;
@ -6086,7 +6093,7 @@ namespace VULKAN_HPP_NAMESPACE
struct FlagTraits<MemoryUnmapFlagBitsKHR>
{
static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryUnmapFlagsKHR allFlags = {};
static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryUnmapFlagsKHR allFlags = MemoryUnmapFlagBitsKHR::eReserveEXT;
};

//=== VK_EXT_surface_maintenance1 ===
@ -279,6 +279,7 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
"VK_KHR_pipeline_executable_properties",
|
||||
"VK_EXT_host_image_copy",
|
||||
"VK_KHR_map_memory2",
|
||||
"VK_EXT_map_memory_placed",
|
||||
"VK_EXT_shader_atomic_float2",
|
||||
"VK_EXT_swapchain_maintenance1",
|
||||
"VK_EXT_shader_demote_to_helper_invocation",
|
||||
@ -432,7 +433,8 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
"VK_KHR_calibrated_timestamps",
|
||||
"VK_KHR_shader_expect_assume",
|
||||
"VK_KHR_maintenance6",
|
||||
"VK_NV_descriptor_pool_overallocation"
|
||||
"VK_NV_descriptor_pool_overallocation",
|
||||
"VK_NV_shader_atomic_float16_vector"
|
||||
};
|
||||
return deviceExtensions;
|
||||
}
|
||||
@ -1398,6 +1400,11 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
"VK_KHR_copy_commands2",
|
||||
"VK_KHR_format_feature_flags2",
|
||||
} } } } },
|
||||
{ "VK_EXT_map_memory_placed",
|
||||
{ { "VK_VERSION_1_0",
|
||||
{ {
|
||||
"VK_KHR_map_memory2",
|
||||
} } } } },
|
||||
{ "VK_EXT_shader_atomic_float2",
|
||||
{ { "VK_VERSION_1_0",
|
||||
{ {
|
||||
@ -2786,14 +2793,15 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
|| ( extension == "VK_KHR_buffer_device_address" ) || ( extension == "VK_EXT_line_rasterization" ) || ( extension == "VK_EXT_shader_atomic_float" ) ||
|
||||
( extension == "VK_EXT_host_query_reset" ) || ( extension == "VK_EXT_index_type_uint8" ) || ( extension == "VK_EXT_extended_dynamic_state" ) ||
|
||||
( extension == "VK_KHR_deferred_host_operations" ) || ( extension == "VK_KHR_pipeline_executable_properties" ) ||
|
||||
( extension == "VK_EXT_host_image_copy" ) || ( extension == "VK_KHR_map_memory2" ) || ( extension == "VK_EXT_shader_atomic_float2" ) ||
|
||||
( extension == "VK_EXT_swapchain_maintenance1" ) || ( extension == "VK_EXT_shader_demote_to_helper_invocation" ) ||
|
||||
( extension == "VK_NV_device_generated_commands" ) || ( extension == "VK_NV_inherited_viewport_scissor" ) ||
|
||||
( extension == "VK_KHR_shader_integer_dot_product" ) || ( extension == "VK_EXT_texel_buffer_alignment" ) ||
|
||||
( extension == "VK_QCOM_render_pass_transform" ) || ( extension == "VK_EXT_depth_bias_control" ) || ( extension == "VK_EXT_device_memory_report" ) ||
|
||||
( extension == "VK_EXT_robustness2" ) || ( extension == "VK_EXT_custom_border_color" ) || ( extension == "VK_GOOGLE_user_type" ) ||
|
||||
( extension == "VK_KHR_pipeline_library" ) || ( extension == "VK_NV_present_barrier" ) || ( extension == "VK_KHR_shader_non_semantic_info" ) ||
|
||||
( extension == "VK_KHR_present_id" ) || ( extension == "VK_EXT_private_data" ) || ( extension == "VK_EXT_pipeline_creation_cache_control" ) ||
|
||||
( extension == "VK_EXT_host_image_copy" ) || ( extension == "VK_KHR_map_memory2" ) || ( extension == "VK_EXT_map_memory_placed" ) ||
|
||||
( extension == "VK_EXT_shader_atomic_float2" ) || ( extension == "VK_EXT_swapchain_maintenance1" ) ||
|
||||
( extension == "VK_EXT_shader_demote_to_helper_invocation" ) || ( extension == "VK_NV_device_generated_commands" ) ||
|
||||
( extension == "VK_NV_inherited_viewport_scissor" ) || ( extension == "VK_KHR_shader_integer_dot_product" ) ||
|
||||
( extension == "VK_EXT_texel_buffer_alignment" ) || ( extension == "VK_QCOM_render_pass_transform" ) ||
|
||||
( extension == "VK_EXT_depth_bias_control" ) || ( extension == "VK_EXT_device_memory_report" ) || ( extension == "VK_EXT_robustness2" ) ||
|
||||
( extension == "VK_EXT_custom_border_color" ) || ( extension == "VK_GOOGLE_user_type" ) || ( extension == "VK_KHR_pipeline_library" ) ||
|
||||
( extension == "VK_NV_present_barrier" ) || ( extension == "VK_KHR_shader_non_semantic_info" ) || ( extension == "VK_KHR_present_id" ) ||
|
||||
( extension == "VK_EXT_private_data" ) || ( extension == "VK_EXT_pipeline_creation_cache_control" ) ||
|
||||
( extension == "VK_KHR_video_encode_queue" ) || ( extension == "VK_NV_device_diagnostics_config" ) ||
|
||||
( extension == "VK_QCOM_render_pass_store_ops" )
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
@ -2866,7 +2874,7 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
|
||||
|| ( extension == "VK_MSFT_layered_driver" ) || ( extension == "VK_KHR_index_type_uint8" ) || ( extension == "VK_KHR_line_rasterization" ) ||
|
||||
( extension == "VK_KHR_calibrated_timestamps" ) || ( extension == "VK_KHR_shader_expect_assume" ) || ( extension == "VK_KHR_maintenance6" ) ||
|
||||
( extension == "VK_NV_descriptor_pool_overallocation" );
|
||||
( extension == "VK_NV_descriptor_pool_overallocation" ) || ( extension == "VK_NV_shader_atomic_float16_vector" );
|
||||
}
|
||||
|
||||
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isInstanceExtension( std::string const & extension )
|
||||
|
@ -20667,8 +20667,8 @@ namespace VULKAN_HPP_NAMESPACE
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

template <typename Dispatch>
VULKAN_HPP_INLINE Result Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR * pMemoryUnmapInfo,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR * pMemoryUnmapInfo,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkUnmapMemory2KHR( m_device, reinterpret_cast<const VkMemoryUnmapInfoKHR *>( pMemoryUnmapInfo ) ) );
@ -20676,15 +20676,19 @@ namespace VULKAN_HPP_NAMESPACE

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
VULKAN_HPP_ASSERT( d.vkUnmapMemory2KHR && "Function <vkUnmapMemory2KHR> requires <VK_KHR_map_memory2>" );
# endif

d.vkUnmapMemory2KHR( m_device, reinterpret_cast<const VkMemoryUnmapInfoKHR *>( &memoryUnmapInfo ) );
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkUnmapMemory2KHR( m_device, reinterpret_cast<const VkMemoryUnmapInfoKHR *>( &memoryUnmapInfo ) ) );
resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2KHR" );

return createResultValueType( result );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
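The enhanced overload above is no longer noexcept: with VK_EXT_map_memory_placed, an unmap that keeps the address range reserved can fail, so the wrapper now checks the result and, with exceptions enabled, throws on failure. A minimal hedged sketch, assuming an existing vk::Device device and vk::DeviceMemory memory:

vk::MemoryUnmapInfoKHR unmapInfo{};
unmapInfo.memory = memory;
unmapInfo.flags  = vk::MemoryUnmapFlagBitsKHR::eReserveEXT;  // keep the virtual address range reserved

device.unmapMemory2KHR( unmapInfo );  // now returns a checked result instead of being a void/noexcept call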
@ -23210,8 +23214,8 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
|
||||
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Extent2D>
|
||||
Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d ) const
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Extent2D>::type
|
||||
Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d ) const
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
|
||||
@ -23222,11 +23226,9 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize;
|
||||
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
|
||||
m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( &maxWorkgroupSize ) ) );
|
||||
resultCheck( result,
|
||||
VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI",
|
||||
{ VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } );
|
||||
resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI" );
|
||||
|
||||
return ResultValue<VULKAN_HPP_NAMESPACE::Extent2D>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( maxWorkgroupSize ) );
|
||||
return createResultValueType( result, std::move( maxWorkgroupSize ) );
|
||||
}
|
||||
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
@ -25388,8 +25390,8 @@ namespace VULKAN_HPP_NAMESPACE

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename ShaderEXTAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>::type
Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>
Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
@ -25405,16 +25407,19 @@ namespace VULKAN_HPP_NAMESPACE
reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) );
resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT" );
resultCheck( result,
VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT",
{ VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } );

return createResultValueType( result, std::move( shaders ) );
return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
std::move( shaders ) );
}
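With eIncompatibleShaderBinaryEXT accepted as an allowed completion code, the enhanced createShadersEXT now hands the result back in a ResultValue instead of throwing for it. A hedged usage sketch, assuming an existing vk::Device device and a filled std::vector<vk::ShaderCreateInfoEXT> createInfos:

vk::ResultValue<std::vector<vk::ShaderEXT>> rv = device.createShadersEXT( createInfos );
if ( rv.result == vk::Result::eIncompatibleShaderBinaryEXT )
{
  // one or more of the provided shader binaries were rejected by this driver;
  // fall back to creating those shaders from SPIR-V instead of a cached binary
}
std::vector<vk::ShaderEXT> shaders = std::move( rv.value );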
template <typename ShaderEXTAllocator,
|
||||
typename Dispatch,
|
||||
typename std::enable_if<std::is_same<typename ShaderEXTAllocator::value_type, VULKAN_HPP_NAMESPACE::ShaderEXT>::value, int>::type>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>::type
|
||||
Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>
|
||||
Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
|
||||
ShaderEXTAllocator & shaderEXTAllocator,
|
||||
Dispatch const & d ) const
|
||||
@ -25431,14 +25436,17 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
|
||||
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
|
||||
reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) );
|
||||
resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT" );
|
||||
resultCheck( result,
|
||||
VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT",
|
||||
{ VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } );
|
||||
|
||||
return createResultValueType( result, std::move( shaders ) );
|
||||
return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
|
||||
std::move( shaders ) );
|
||||
}
|
||||
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderEXT>::type
|
||||
Device::createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::ShaderEXT>
|
||||
Device::createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
|
||||
Dispatch const & d ) const
|
||||
{
|
||||
@ -25454,15 +25462,16 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ),
|
||||
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
|
||||
reinterpret_cast<VkShaderEXT *>( &shader ) ) );
|
||||
resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXT" );
|
||||
resultCheck( result,
|
||||
VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXT",
|
||||
{ VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } );
|
||||
|
||||
return createResultValueType( result, std::move( shader ) );
|
||||
return ResultValue<VULKAN_HPP_NAMESPACE::ShaderEXT>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( shader ) );
|
||||
}
|
||||
|
||||
# ifndef VULKAN_HPP_NO_SMART_HANDLE
|
||||
template <typename Dispatch, typename ShaderEXTAllocator>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
|
||||
typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>::type
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>
|
||||
Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
|
||||
Dispatch const & d ) const
|
||||
@ -25479,7 +25488,9 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
|
||||
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
|
||||
reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) );
|
||||
resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique" );
|
||||
resultCheck( result,
|
||||
VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique",
|
||||
{ VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } );
|
||||
std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator> uniqueShaders;
|
||||
uniqueShaders.reserve( createInfos.size() );
|
||||
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
|
||||
@ -25487,15 +25498,15 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
{
|
||||
uniqueShaders.push_back( UniqueHandle<ShaderEXT, Dispatch>( shader, deleter ) );
|
||||
}
|
||||
return createResultValueType( result, std::move( uniqueShaders ) );
|
||||
return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>(
|
||||
static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueShaders ) );
|
||||
}
|
||||
|
||||
template <
|
||||
typename Dispatch,
|
||||
typename ShaderEXTAllocator,
|
||||
typename std::enable_if<std::is_same<typename ShaderEXTAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>::value, int>::type>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
|
||||
typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>::type
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>
|
||||
Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
|
||||
ShaderEXTAllocator & shaderEXTAllocator,
|
||||
@ -25513,7 +25524,9 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
|
||||
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
|
||||
reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) );
|
||||
resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique" );
|
||||
resultCheck( result,
|
||||
VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique",
|
||||
{ VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } );
|
||||
std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator> uniqueShaders( shaderEXTAllocator );
|
||||
uniqueShaders.reserve( createInfos.size() );
|
||||
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
|
||||
@ -25521,12 +25534,13 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
{
|
||||
uniqueShaders.push_back( UniqueHandle<ShaderEXT, Dispatch>( shader, deleter ) );
|
||||
}
|
||||
return createResultValueType( result, std::move( uniqueShaders ) );
|
||||
return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>(
|
||||
static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueShaders ) );
|
||||
}
|
||||
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>::type
|
||||
Device::createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>
|
||||
Device::createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
|
||||
Dispatch const & d ) const
|
||||
{
|
||||
@ -25542,10 +25556,13 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ),
|
||||
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
|
||||
reinterpret_cast<VkShaderEXT *>( &shader ) ) );
|
||||
resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXTUnique" );
|
||||
resultCheck( result,
|
||||
VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXTUnique",
|
||||
{ VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } );
|
||||
|
||||
return createResultValueType( result,
|
||||
UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>( shader, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
|
||||
return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>(
|
||||
static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
|
||||
UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>( shader, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
|
||||
}
|
||||
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
|
||||
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
@ -1163,6 +1163,11 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
struct MemoryMapInfoKHR;
|
||||
struct MemoryUnmapInfoKHR;
|
||||
|
||||
//=== VK_EXT_map_memory_placed ===
|
||||
struct PhysicalDeviceMapMemoryPlacedFeaturesEXT;
|
||||
struct PhysicalDeviceMapMemoryPlacedPropertiesEXT;
|
||||
struct MemoryMapPlacedInfoEXT;
|
||||
|
||||
//=== VK_EXT_shader_atomic_float2 ===
|
||||
struct PhysicalDeviceShaderAtomicFloat2FeaturesEXT;
|
||||
|
||||
@ -1815,6 +1820,9 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
//=== VK_NV_descriptor_pool_overallocation ===
|
||||
struct PhysicalDeviceDescriptorPoolOverallocationFeaturesNV;
|
||||
|
||||
//=== VK_NV_shader_atomic_float16_vector ===
|
||||
struct PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV;
|
||||
|
||||
//===================================
|
||||
//=== HANDLE forward declarations ===
|
||||
//===================================
|
||||
@ -12754,12 +12762,12 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
Result unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR * pMemoryUnmapInfo,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
VULKAN_HPP_NODISCARD Result unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR * pMemoryUnmapInfo,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
void unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
typename ResultValueType<void>::type unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
//=== VK_EXT_swapchain_maintenance1 ===
|
||||
@ -13318,7 +13326,7 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
VULKAN_HPP_NODISCARD ResultValue<VULKAN_HPP_NAMESPACE::Extent2D>
|
||||
VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Extent2D>::type
|
||||
getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
@ -13736,44 +13744,44 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
template <typename ShaderEXTAllocator = std::allocator<VULKAN_HPP_NAMESPACE::ShaderEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>::type
|
||||
createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>
|
||||
createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
template <typename ShaderEXTAllocator = std::allocator<VULKAN_HPP_NAMESPACE::ShaderEXT>,
|
||||
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
|
||||
typename std::enable_if<std::is_same<typename ShaderEXTAllocator::value_type, VULKAN_HPP_NAMESPACE::ShaderEXT>::value, int>::type = 0>
|
||||
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>::type
|
||||
createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
|
||||
ShaderEXTAllocator & shaderEXTAllocator,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>
|
||||
createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
|
||||
ShaderEXTAllocator & shaderEXTAllocator,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderEXT>::type
|
||||
createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
VULKAN_HPP_NODISCARD ResultValue<VULKAN_HPP_NAMESPACE::ShaderEXT>
|
||||
createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
# ifndef VULKAN_HPP_NO_SMART_HANDLE
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
|
||||
typename ShaderEXTAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>>
|
||||
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>::type
|
||||
createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>
|
||||
createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
|
||||
typename ShaderEXTAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>,
|
||||
typename std::enable_if<std::is_same<typename ShaderEXTAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>::value,
|
||||
int>::type = 0>
|
||||
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>::type
|
||||
createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
|
||||
ShaderEXTAllocator & shaderEXTAllocator,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>
|
||||
createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
|
||||
ShaderEXTAllocator & shaderEXTAllocator,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>::type
|
||||
createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>
|
||||
createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
|
||||
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
|
@ -6815,6 +6815,19 @@ namespace std
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::MemoryMapPlacedInfoEXT>
|
||||
{
|
||||
std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryMapPlacedInfoEXT const & memoryMapPlacedInfoEXT ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
std::size_t seed = 0;
|
||||
VULKAN_HPP_HASH_COMBINE( seed, memoryMapPlacedInfoEXT.sType );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, memoryMapPlacedInfoEXT.pNext );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, memoryMapPlacedInfoEXT.pPlacedAddress );
|
||||
return seed;
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo>
|
||||
{
|
||||
@ -9683,6 +9696,36 @@ namespace std
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedFeaturesEXT>
|
||||
{
|
||||
std::size_t
|
||||
operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedFeaturesEXT const & physicalDeviceMapMemoryPlacedFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
std::size_t seed = 0;
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedFeaturesEXT.sType );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedFeaturesEXT.pNext );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedFeaturesEXT.memoryMapPlaced );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedFeaturesEXT.memoryMapRangePlaced );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedFeaturesEXT.memoryUnmapReserve );
|
||||
return seed;
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedPropertiesEXT>
|
||||
{
|
||||
std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedPropertiesEXT const & physicalDeviceMapMemoryPlacedPropertiesEXT ) const
|
||||
VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
std::size_t seed = 0;
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedPropertiesEXT.sType );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedPropertiesEXT.pNext );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedPropertiesEXT.minPlacedMemoryMapAlignment );
|
||||
return seed;
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT>
|
||||
{
|
||||
@ -10949,6 +10992,20 @@ namespace std
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV>
|
||||
{
|
||||
std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & physicalDeviceShaderAtomicFloat16VectorFeaturesNV )
|
||||
const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
std::size_t seed = 0;
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat16VectorFeaturesNV.sType );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat16VectorFeaturesNV.pNext );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat16VectorFeaturesNV.shaderFloat16VectorAtomics );
|
||||
return seed;
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT>
|
||||
{
|
||||
|
@ -4311,7 +4311,7 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
|
||||
VULKAN_HPP_NODISCARD void * mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo ) const;
|
||||
|
||||
void unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT;
|
||||
void unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo ) const;
|
||||
|
||||
//=== VK_EXT_swapchain_maintenance1 ===
|
||||
|
||||
@ -10409,7 +10409,7 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
|
||||
//=== VK_HUAWEI_subpass_shading ===
|
||||
|
||||
VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::Result, VULKAN_HPP_NAMESPACE::Extent2D> getSubpassShadingMaxWorkgroupSizeHUAWEI() const;
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D getSubpassShadingMaxWorkgroupSizeHUAWEI() const;
|
||||
|
||||
private:
|
||||
VULKAN_HPP_NAMESPACE::Device m_device = {};
|
||||
@ -10806,10 +10806,12 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
|
||||
ShaderEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
||||
VkShaderEXT shader,
|
||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr,
|
||||
VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess )
|
||||
: m_device( device )
|
||||
, m_shader( shader )
|
||||
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
||||
, m_constructorSuccessCode( successCode )
|
||||
, m_dispatcher( device.getDispatcher() )
|
||||
{
|
||||
}
|
||||
@ -10828,6 +10830,7 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
: m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
|
||||
, m_shader( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shader, {} ) )
|
||||
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
|
||||
, m_constructorSuccessCode( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ) )
|
||||
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
|
||||
{
|
||||
}
|
||||
@ -10841,6 +10844,7 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
std::swap( m_device, rhs.m_device );
|
||||
std::swap( m_shader, rhs.m_shader );
|
||||
std::swap( m_allocator, rhs.m_allocator );
|
||||
std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode );
|
||||
std::swap( m_dispatcher, rhs.m_dispatcher );
|
||||
}
|
||||
return *this;
|
||||
@ -10863,20 +10867,27 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
getDispatcher()->vkDestroyShaderEXT(
|
||||
static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( m_shader ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
|
||||
}
|
||||
m_device = nullptr;
|
||||
m_shader = nullptr;
|
||||
m_allocator = nullptr;
|
||||
m_dispatcher = nullptr;
|
||||
m_device = nullptr;
|
||||
m_shader = nullptr;
|
||||
m_allocator = nullptr;
|
||||
m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;
|
||||
m_dispatcher = nullptr;
|
||||
}
|
||||
|
||||
VULKAN_HPP_NAMESPACE::ShaderEXT release()
|
||||
{
|
||||
m_device = nullptr;
|
||||
m_allocator = nullptr;
|
||||
m_dispatcher = nullptr;
|
||||
m_device = nullptr;
|
||||
m_allocator = nullptr;
|
||||
m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;
|
||||
m_dispatcher = nullptr;
|
||||
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_shader, nullptr );
|
||||
}
|
||||
|
||||
VULKAN_HPP_NAMESPACE::Result getConstructorSuccessCode() const
|
||||
{
|
||||
return m_constructorSuccessCode;
|
||||
}
|
||||
|
||||
VULKAN_HPP_NAMESPACE::Device getDevice() const
|
||||
{
|
||||
return m_device;
|
||||
@ -10893,6 +10904,7 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
std::swap( m_device, rhs.m_device );
|
||||
std::swap( m_shader, rhs.m_shader );
|
||||
std::swap( m_allocator, rhs.m_allocator );
|
||||
std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode );
|
||||
std::swap( m_dispatcher, rhs.m_dispatcher );
|
||||
}
|
||||
|
||||
@ -10901,10 +10913,11 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
VULKAN_HPP_NODISCARD std::vector<uint8_t> getBinaryData() const;
|
||||
|
||||
private:
|
||||
VULKAN_HPP_NAMESPACE::Device m_device = {};
|
||||
VULKAN_HPP_NAMESPACE::ShaderEXT m_shader = {};
|
||||
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
|
||||
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
|
||||
VULKAN_HPP_NAMESPACE::Device m_device = {};
|
||||
VULKAN_HPP_NAMESPACE::ShaderEXT m_shader = {};
|
||||
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
|
||||
VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;
|
||||
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
|
||||
};
|
||||
|
||||
class ShaderEXTs : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT>
|
||||
@ -20221,11 +20234,13 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
return pData;
|
||||
}
|
||||
|
||||
VULKAN_HPP_INLINE void Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT
|
||||
VULKAN_HPP_INLINE void Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo ) const
|
||||
{
|
||||
VULKAN_HPP_ASSERT( getDispatcher()->vkUnmapMemory2KHR && "Function <vkUnmapMemory2KHR> requires <VK_KHR_map_memory2>" );
|
||||
|
||||
getDispatcher()->vkUnmapMemory2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryUnmapInfoKHR *>( &memoryUnmapInfo ) );
|
||||
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
|
||||
getDispatcher()->vkUnmapMemory2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryUnmapInfoKHR *>( &memoryUnmapInfo ) ) );
|
||||
resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2KHR" );
|
||||
}
|
||||
|
||||
//=== VK_EXT_swapchain_maintenance1 ===
|
||||
@ -21311,8 +21326,7 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
|
||||
//=== VK_HUAWEI_subpass_shading ===
|
||||
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, VULKAN_HPP_NAMESPACE::Extent2D>
|
||||
RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI() const
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI() const
|
||||
{
|
||||
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI &&
|
||||
"Function <vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI> requires <VK_HUAWEI_subpass_shading>" );
|
||||
@ -21320,11 +21334,9 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize;
|
||||
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
|
||||
static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( m_renderPass ), reinterpret_cast<VkExtent2D *>( &maxWorkgroupSize ) ) );
|
||||
resultCheck( result,
|
||||
VULKAN_HPP_NAMESPACE_STRING "::RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI",
|
||||
{ VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } );
|
||||
resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI" );
|
||||
|
||||
return std::make_pair( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( maxWorkgroupSize ) );
|
||||
return maxWorkgroupSize;
|
||||
}
|
||||
|
||||
VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI() const VULKAN_HPP_NOEXCEPT
|
||||
@ -22521,7 +22533,7 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
|
||||
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
|
||||
reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) );
|
||||
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
|
||||
if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT ) )
|
||||
{
|
||||
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
|
||||
return VULKAN_HPP_UNEXPECTED( result );
|
||||
@ -22534,7 +22546,7 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
shadersRAII.reserve( shaders.size() );
|
||||
for ( auto & shader : shaders )
|
||||
{
|
||||
shadersRAII.emplace_back( *this, *reinterpret_cast<VkShaderEXT *>( &shader ), allocator );
|
||||
shadersRAII.emplace_back( *this, *reinterpret_cast<VkShaderEXT *>( &shader ), allocator, result );
|
||||
}
|
||||
return shadersRAII;
|
||||
}
|
||||
@ -22551,7 +22563,7 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ),
|
||||
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
|
||||
reinterpret_cast<VkShaderEXT *>( &shader ) ) );
|
||||
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
|
||||
if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT ) )
|
||||
{
|
||||
# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
|
||||
return VULKAN_HPP_UNEXPECTED( result );
|
||||
|
@@ -4666,6 +4666,28 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryUn
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR>::value,
"MemoryUnmapInfoKHR is not nothrow_move_constructible!" );

//=== VK_EXT_map_memory_placed ===

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedFeaturesEXT ) == sizeof( VkPhysicalDeviceMapMemoryPlacedFeaturesEXT ),
"struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedFeaturesEXT>::value,
"struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedFeaturesEXT>::value,
"PhysicalDeviceMapMemoryPlacedFeaturesEXT is not nothrow_move_constructible!" );

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedPropertiesEXT ) == sizeof( VkPhysicalDeviceMapMemoryPlacedPropertiesEXT ),
"struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedPropertiesEXT>::value,
"struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedPropertiesEXT>::value,
"PhysicalDeviceMapMemoryPlacedPropertiesEXT is not nothrow_move_constructible!" );

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryMapPlacedInfoEXT ) == sizeof( VkMemoryMapPlacedInfoEXT ),
"struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryMapPlacedInfoEXT>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryMapPlacedInfoEXT>::value,
"MemoryMapPlacedInfoEXT is not nothrow_move_constructible!" );

//=== VK_EXT_shader_atomic_float2 ===

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT ) ==
@@ -7452,4 +7474,14 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Physical
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV>::value,
"PhysicalDeviceDescriptorPoolOverallocationFeaturesNV is not nothrow_move_constructible!" );

//=== VK_NV_shader_atomic_float16_vector ===

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV ) ==
sizeof( VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV ),
"struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV>::value,
"struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV>::value,
"PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV is not nothrow_move_constructible!" );

#endif
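These assertions cover the new VK_EXT_map_memory_placed wrappers. A minimal sketch of probing the corresponding feature through the pNext chain; the physicalDevice handle is assumed and the snippet is not part of this diff:

#include <vulkan/vulkan.hpp>

// Hedged sketch: check whether placed memory maps are supported.
bool supportsPlacedMemoryMaps( vk::PhysicalDevice physicalDevice )
{
  vk::PhysicalDeviceMapMemoryPlacedFeaturesEXT placedFeatures{};
  vk::PhysicalDeviceFeatures2                  features2{};
  features2.pNext = &placedFeatures;          // chain the EXT feature struct behind the core struct
  physicalDevice.getFeatures2( &features2 );  // fills both structs in one call
  return placedFeatures.memoryMapPlaced == VK_TRUE;
}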
File diff suppressed because it is too large
@@ -382,9 +382,16 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}

VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags )
VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags value )
{
return "{}";
if ( !value )
return "{}";

std::string result;
if ( value & MemoryMapFlagBits::ePlacedEXT )
result += "PlacedEXT | ";

return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}

VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value )
@@ -2810,9 +2817,16 @@ namespace VULKAN_HPP_NAMESPACE

//=== VK_KHR_map_memory2 ===

VULKAN_HPP_INLINE std::string to_string( MemoryUnmapFlagsKHR )
VULKAN_HPP_INLINE std::string to_string( MemoryUnmapFlagsKHR value )
{
return "{}";
if ( !value )
return "{}";

std::string result;
if ( value & MemoryUnmapFlagBitsKHR::eReserveEXT )
result += "ReserveEXT | ";

return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}

//=== VK_EXT_surface_maintenance1 ===
@@ -3579,7 +3593,7 @@ namespace VULKAN_HPP_NAMESPACE
case Result::eOperationNotDeferredKHR: return "OperationNotDeferredKHR";
case Result::eErrorInvalidVideoStdParametersKHR: return "ErrorInvalidVideoStdParametersKHR";
case Result::eErrorCompressionExhaustedEXT: return "ErrorCompressionExhaustedEXT";
case Result::eErrorIncompatibleShaderBinaryEXT: return "ErrorIncompatibleShaderBinaryEXT";
case Result::eIncompatibleShaderBinaryEXT: return "IncompatibleShaderBinaryEXT";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@@ -4184,6 +4198,9 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eHostImageCopyDevicePerformanceQueryEXT: return "HostImageCopyDevicePerformanceQueryEXT";
case StructureType::eMemoryMapInfoKHR: return "MemoryMapInfoKHR";
case StructureType::eMemoryUnmapInfoKHR: return "MemoryUnmapInfoKHR";
case StructureType::ePhysicalDeviceMapMemoryPlacedFeaturesEXT: return "PhysicalDeviceMapMemoryPlacedFeaturesEXT";
case StructureType::ePhysicalDeviceMapMemoryPlacedPropertiesEXT: return "PhysicalDeviceMapMemoryPlacedPropertiesEXT";
case StructureType::eMemoryMapPlacedInfoEXT: return "MemoryMapPlacedInfoEXT";
case StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT: return "PhysicalDeviceShaderAtomicFloat2FeaturesEXT";
case StructureType::eSurfacePresentModeEXT: return "SurfacePresentModeEXT";
case StructureType::eSurfacePresentScalingCapabilitiesEXT: return "SurfacePresentScalingCapabilitiesEXT";
@@ -4532,6 +4549,7 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eSetDescriptorBufferOffsetsInfoEXT: return "SetDescriptorBufferOffsetsInfoEXT";
case StructureType::eBindDescriptorBufferEmbeddedSamplersInfoEXT: return "BindDescriptorBufferEmbeddedSamplersInfoEXT";
case StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV: return "PhysicalDeviceDescriptorPoolOverallocationFeaturesNV";
case StructureType::ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV: return "PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@@ -5151,9 +5169,13 @@ namespace VULKAN_HPP_NAMESPACE
}
}

VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits )
VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits value )
{
return "(void)";
switch ( value )
{
case MemoryMapFlagBits::ePlacedEXT: return "PlacedEXT";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}

VULKAN_HPP_INLINE std::string to_string( ImageAspectFlagBits value )
@@ -8111,9 +8133,13 @@ namespace VULKAN_HPP_NAMESPACE

//=== VK_KHR_map_memory2 ===

VULKAN_HPP_INLINE std::string to_string( MemoryUnmapFlagBitsKHR )
VULKAN_HPP_INLINE std::string to_string( MemoryUnmapFlagBitsKHR value )
{
return "(void)";
switch ( value )
{
case MemoryUnmapFlagBitsKHR::eReserveEXT: return "ReserveEXT";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}

//=== VK_EXT_surface_maintenance1 ===
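With the flag bits added above, to_string now spells out the placed/reserve bits instead of always returning "{}". A small usage sketch, not part of this diff:

#include <iostream>
#include <vulkan/vulkan.hpp>
#include <vulkan/vulkan_to_string.hpp>

int main()
{
  vk::MemoryMapFlags mapFlags = vk::MemoryMapFlagBits::ePlacedEXT;
  std::cout << vk::to_string( mapFlags ) << '\n';                          // "{ PlacedEXT }"
  std::cout << vk::to_string( vk::MemoryUnmapFlagBitsKHR::eReserveEXT );   // "ReserveEXT"
}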
@@ -17751,6 +17751,18 @@ namespace VULKAN_HPP_NAMESPACE
{
}

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ExtensionProperties( std::string const & extensionName_, uint32_t specVersion_ = {} ) : specVersion( specVersion_ )
{
VULKAN_HPP_ASSERT( extensionName_.size() < VK_MAX_EXTENSION_NAME_SIZE );
# if defined( WIN32 )
strncpy_s( extensionName, VK_MAX_EXTENSION_NAME_SIZE, extensionName_.data(), extensionName_.size() );
# else
strncpy( extensionName, extensionName_.data(), std::min<size_t>( VK_MAX_EXTENSION_NAME_SIZE, extensionName_.size() ) );
# endif
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

ExtensionProperties & operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

@@ -17783,22 +17795,26 @@ namespace VULKAN_HPP_NAMESPACE
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( ExtensionProperties const & ) const = default;
#else
std::strong_ordering operator<=>( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = strcmp( extensionName, rhs.extensionName ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = specVersion <=> rhs.specVersion; cmp != 0 )
return cmp;

return std::strong_ordering::equivalent;
}
#endif

bool operator==( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( extensionName == rhs.extensionName ) && ( specVersion == rhs.specVersion );
# endif
return ( strcmp( extensionName, rhs.extensionName ) == 0 ) && ( specVersion == rhs.specVersion );
}

bool operator!=( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif

public:
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> extensionName = {};
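The std::string convenience constructor and the strcmp-based comparisons added here (and, in the hunks that follow, to LayerProperties, PerformanceCounterDescriptionKHR, PhysicalDeviceDriverProperties and the other structs holding char arrays) compare the array contents rather than the ArrayWrapper1D objects. A small sketch under the default configuration (struct constructors and enhanced mode enabled), not part of this diff:

#include <cassert>
#include <vulkan/vulkan.hpp>

int main()
{
  // the char array is filled from the std::string, bounded by VK_MAX_EXTENSION_NAME_SIZE
  vk::ExtensionProperties a( std::string( "VK_KHR_swapchain" ), 70 );
  vk::ExtensionProperties b( std::string( "VK_KHR_swapchain" ), 70 );
  assert( a == b );        // equality now compares the names with strcmp
  assert( !( a != b ) );
}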
@@ -26364,6 +26380,26 @@ namespace VULKAN_HPP_NAMESPACE

LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT : LayerProperties( *reinterpret_cast<LayerProperties const *>( &rhs ) ) {}

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
LayerProperties( std::string const & layerName_, uint32_t specVersion_ = {}, uint32_t implementationVersion_ = {}, std::string const & description_ = {} )
: specVersion( specVersion_ ), implementationVersion( implementationVersion_ )
{
VULKAN_HPP_ASSERT( layerName_.size() < VK_MAX_EXTENSION_NAME_SIZE );
# if defined( WIN32 )
strncpy_s( layerName, VK_MAX_EXTENSION_NAME_SIZE, layerName_.data(), layerName_.size() );
# else
strncpy( layerName, layerName_.data(), std::min<size_t>( VK_MAX_EXTENSION_NAME_SIZE, layerName_.size() ) );
# endif

VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE );
# if defined( WIN32 )
strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() );
# else
strncpy( description, description_.data(), std::min<size_t>( VK_MAX_DESCRIPTION_SIZE, description_.size() ) );
# endif
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

@@ -26399,23 +26435,31 @@ namespace VULKAN_HPP_NAMESPACE
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( LayerProperties const & ) const = default;
#else
std::strong_ordering operator<=>( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = strcmp( layerName, rhs.layerName ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = specVersion <=> rhs.specVersion; cmp != 0 )
return cmp;
if ( auto cmp = implementationVersion <=> rhs.implementationVersion; cmp != 0 )
return cmp;
if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

return std::strong_ordering::equivalent;
}
#endif

bool operator==( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( layerName == rhs.layerName ) && ( specVersion == rhs.specVersion ) && ( implementationVersion == rhs.implementationVersion ) &&
( description == rhs.description );
# endif
return ( strcmp( layerName, rhs.layerName ) == 0 ) && ( specVersion == rhs.specVersion ) && ( implementationVersion == rhs.implementationVersion ) &&
( strcmp( description, rhs.description ) == 0 );
}

bool operator!=( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif

public:
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layerName = {};
@@ -28037,6 +28081,37 @@ namespace VULKAN_HPP_NAMESPACE
{
}

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PerformanceCounterDescriptionKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_,
std::string const & name_,
std::string const & category_ = {},
std::string const & description_ = {},
void * pNext_ = nullptr )
: pNext( pNext_ ), flags( flags_ )
{
VULKAN_HPP_ASSERT( name_.size() < VK_MAX_DESCRIPTION_SIZE );
# if defined( WIN32 )
strncpy_s( name, VK_MAX_DESCRIPTION_SIZE, name_.data(), name_.size() );
# else
strncpy( name, name_.data(), std::min<size_t>( VK_MAX_DESCRIPTION_SIZE, name_.size() ) );
# endif

VULKAN_HPP_ASSERT( category_.size() < VK_MAX_DESCRIPTION_SIZE );
# if defined( WIN32 )
strncpy_s( category, VK_MAX_DESCRIPTION_SIZE, category_.data(), category_.size() );
# else
strncpy( category, category_.data(), std::min<size_t>( VK_MAX_DESCRIPTION_SIZE, category_.size() ) );
# endif

VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE );
# if defined( WIN32 )
strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() );
# else
strncpy( description, description_.data(), std::min<size_t>( VK_MAX_DESCRIPTION_SIZE, description_.size() ) );
# endif
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

@@ -28074,23 +28149,35 @@ namespace VULKAN_HPP_NAMESPACE
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PerformanceCounterDescriptionKHR const & ) const = default;
#else
std::strong_ordering operator<=>( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
return cmp;
if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
return cmp;
if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = strcmp( category, rhs.category ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

return std::strong_ordering::equivalent;
}
#endif

bool operator==( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( name == rhs.name ) && ( category == rhs.category ) &&
( description == rhs.description );
# endif
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( strcmp( name, rhs.name ) == 0 ) &&
( strcmp( category, rhs.category ) == 0 ) && ( strcmp( description, rhs.description ) == 0 );
}

bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif

public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR;
@@ -30587,6 +30674,30 @@ namespace VULKAN_HPP_NAMESPACE
{
}

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PhysicalDeviceDriverProperties( VULKAN_HPP_NAMESPACE::DriverId driverID_,
std::string const & driverName_,
std::string const & driverInfo_ = {},
VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {},
void * pNext_ = nullptr )
: pNext( pNext_ ), driverID( driverID_ ), conformanceVersion( conformanceVersion_ )
{
VULKAN_HPP_ASSERT( driverName_.size() < VK_MAX_DRIVER_NAME_SIZE );
# if defined( WIN32 )
strncpy_s( driverName, VK_MAX_DRIVER_NAME_SIZE, driverName_.data(), driverName_.size() );
# else
strncpy( driverName, driverName_.data(), std::min<size_t>( VK_MAX_DRIVER_NAME_SIZE, driverName_.size() ) );
# endif

VULKAN_HPP_ASSERT( driverInfo_.size() < VK_MAX_DRIVER_INFO_SIZE );
# if defined( WIN32 )
strncpy_s( driverInfo, VK_MAX_DRIVER_INFO_SIZE, driverInfo_.data(), driverInfo_.size() );
# else
strncpy( driverInfo, driverInfo_.data(), std::min<size_t>( VK_MAX_DRIVER_INFO_SIZE, driverInfo_.size() ) );
# endif
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

PhysicalDeviceDriverProperties & operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

@@ -30624,23 +30735,35 @@ namespace VULKAN_HPP_NAMESPACE
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceDriverProperties const & ) const = default;
#else
std::strong_ordering operator<=>( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
return cmp;
if ( auto cmp = driverID <=> rhs.driverID; cmp != 0 )
return cmp;
if ( auto cmp = strcmp( driverName, rhs.driverName ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = strcmp( driverInfo, rhs.driverInfo ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = conformanceVersion <=> rhs.conformanceVersion; cmp != 0 )
return cmp;

return std::strong_ordering::equivalent;
}
#endif

bool operator==( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( driverName == rhs.driverName ) &&
( driverInfo == rhs.driverInfo ) && ( conformanceVersion == rhs.conformanceVersion );
# endif
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( strcmp( driverName, rhs.driverName ) == 0 ) &&
( strcmp( driverInfo, rhs.driverInfo ) == 0 ) && ( conformanceVersion == rhs.conformanceVersion );
}

bool operator!=( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif

public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties;
@@ -32795,6 +32918,19 @@ namespace VULKAN_HPP_NAMESPACE
{
}

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PhysicalDeviceGroupProperties( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::PhysicalDevice> const & physicalDevices_,
VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {},
void * pNext_ = nullptr )
: pNext( pNext_ )
, physicalDeviceCount( std::min( static_cast<uint32_t>( physicalDevices_.size() ), VK_MAX_DEVICE_GROUP_SIZE ) )
, subsetAllocation( subsetAllocation_ )
{
VULKAN_HPP_ASSERT( physicalDevices_.size() < VK_MAX_DEVICE_GROUP_SIZE );
memcpy( physicalDevices, physicalDevices_.data(), physicalDeviceCount * sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

@@ -32831,23 +32967,37 @@ namespace VULKAN_HPP_NAMESPACE
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceGroupProperties const & ) const = default;
#else
std::strong_ordering operator<=>( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
return cmp;
if ( auto cmp = physicalDeviceCount <=> rhs.physicalDeviceCount; cmp != 0 )
return cmp;
for ( size_t i = 0; i < physicalDeviceCount; ++i )
{
if ( auto cmp = physicalDevices[i] <=> rhs.physicalDevices[i]; cmp != 0 )
return cmp;
}
if ( auto cmp = subsetAllocation <=> rhs.subsetAllocation; cmp != 0 )
return cmp;

return std::strong_ordering::equivalent;
}
#endif

bool operator==( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) &&
( physicalDevices == rhs.physicalDevices ) && ( subsetAllocation == rhs.subsetAllocation );
# endif
( memcmp( physicalDevices, rhs.physicalDevices, physicalDeviceCount * sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) ) == 0 ) &&
( subsetAllocation == rhs.subsetAllocation );
}

bool operator!=( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif

public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties;
@@ -34927,6 +35077,19 @@ namespace VULKAN_HPP_NAMESPACE
{
}

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PhysicalDeviceMemoryProperties( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::MemoryType> const & memoryTypes_,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::MemoryHeap> const & memoryHeaps_ = {} )
: memoryTypeCount( std::min( static_cast<uint32_t>( memoryTypes_.size() ), VK_MAX_MEMORY_TYPES ) )
, memoryHeapCount( std::min( static_cast<uint32_t>( memoryHeaps_.size() ), VK_MAX_MEMORY_HEAPS ) )
{
VULKAN_HPP_ASSERT( memoryTypes_.size() < VK_MAX_MEMORY_TYPES );
memcpy( memoryTypes, memoryTypes_.data(), memoryTypeCount * sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) );
VULKAN_HPP_ASSERT( memoryHeaps_.size() < VK_MAX_MEMORY_HEAPS );
memcpy( memoryHeaps, memoryHeaps_.data(), memoryHeapCount * sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

PhysicalDeviceMemoryProperties & operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

@@ -34962,23 +35125,39 @@ namespace VULKAN_HPP_NAMESPACE
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceMemoryProperties const & ) const = default;
#else
std::strong_ordering operator<=>( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = memoryTypeCount <=> rhs.memoryTypeCount; cmp != 0 )
return cmp;
for ( size_t i = 0; i < memoryTypeCount; ++i )
{
if ( auto cmp = memoryTypes[i] <=> rhs.memoryTypes[i]; cmp != 0 )
return cmp;
}
if ( auto cmp = memoryHeapCount <=> rhs.memoryHeapCount; cmp != 0 )
return cmp;
for ( size_t i = 0; i < memoryHeapCount; ++i )
{
if ( auto cmp = memoryHeaps[i] <=> rhs.memoryHeaps[i]; cmp != 0 )
return cmp;
}

return std::strong_ordering::equivalent;
}
#endif

bool operator==( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( memoryTypeCount == rhs.memoryTypeCount ) && ( memoryTypes == rhs.memoryTypes ) && ( memoryHeapCount == rhs.memoryHeapCount ) &&
( memoryHeaps == rhs.memoryHeaps );
# endif
return ( memoryTypeCount == rhs.memoryTypeCount ) &&
( memcmp( memoryTypes, rhs.memoryTypes, memoryTypeCount * sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) ) == 0 ) &&
( memoryHeapCount == rhs.memoryHeapCount ) &&
( memcmp( memoryHeaps, rhs.memoryHeaps, memoryHeapCount * sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) ) == 0 );
}

bool operator!=( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif

public:
uint32_t memoryTypeCount = {};
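The two ArrayProxy constructors above clamp the counts to the fixed array capacities and memcpy the elements into the wrapper, while the matching operator== hunks switch to memcmp over only the used entries. A minimal sketch, not part of this diff:

#include <cassert>
#include <vulkan/vulkan.hpp>

int main()
{
  vk::MemoryType hostVisible( vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent, 0 );
  // one memory type, no heaps: the counts come from the proxies, the data is memcpy'd into the wrapper
  vk::PhysicalDeviceMemoryProperties props( { hostVisible } );
  assert( props.memoryTypeCount == 1 );
  assert( props.memoryHeapCount == 0 );
  assert( props == props );   // memcmp over the first memoryTypeCount / memoryHeapCount entries only
}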
@@ -35979,6 +36158,34 @@ namespace VULKAN_HPP_NAMESPACE
{
}

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PhysicalDeviceProperties( uint32_t apiVersion_,
uint32_t driverVersion_,
uint32_t vendorID_,
uint32_t deviceID_,
VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_,
std::string const & deviceName_,
std::array<uint8_t, VK_UUID_SIZE> const & pipelineCacheUUID_ = {},
VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {},
VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} )
: apiVersion( apiVersion_ )
, driverVersion( driverVersion_ )
, vendorID( vendorID_ )
, deviceID( deviceID_ )
, deviceType( deviceType_ )
, pipelineCacheUUID( pipelineCacheUUID_ )
, limits( limits_ )
, sparseProperties( sparseProperties_ )
{
VULKAN_HPP_ASSERT( deviceName_.size() < VK_MAX_PHYSICAL_DEVICE_NAME_SIZE );
# if defined( WIN32 )
strncpy_s( deviceName, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE, deviceName_.data(), deviceName_.size() );
# else
strncpy( deviceName, deviceName_.data(), std::min<size_t>( VK_MAX_PHYSICAL_DEVICE_NAME_SIZE, deviceName_.size() ) );
# endif
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

@@ -36019,24 +36226,42 @@ namespace VULKAN_HPP_NAMESPACE
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceProperties const & ) const = default;
#else
std::partial_ordering operator<=>( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = apiVersion <=> rhs.apiVersion; cmp != 0 )
return cmp;
if ( auto cmp = driverVersion <=> rhs.driverVersion; cmp != 0 )
return cmp;
if ( auto cmp = vendorID <=> rhs.vendorID; cmp != 0 )
return cmp;
if ( auto cmp = deviceID <=> rhs.deviceID; cmp != 0 )
return cmp;
if ( auto cmp = deviceType <=> rhs.deviceType; cmp != 0 )
return cmp;
if ( auto cmp = strcmp( deviceName, rhs.deviceName ); cmp != 0 )
return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater;
if ( auto cmp = pipelineCacheUUID <=> rhs.pipelineCacheUUID; cmp != 0 )
return cmp;
if ( auto cmp = limits <=> rhs.limits; cmp != 0 )
return cmp;
if ( auto cmp = sparseProperties <=> rhs.sparseProperties; cmp != 0 )
return cmp;

return std::partial_ordering::equivalent;
}
#endif

bool operator==( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( apiVersion == rhs.apiVersion ) && ( driverVersion == rhs.driverVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) &&
( deviceType == rhs.deviceType ) && ( deviceName == rhs.deviceName ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) &&
( deviceType == rhs.deviceType ) && ( strcmp( deviceName, rhs.deviceName ) == 0 ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) &&
( limits == rhs.limits ) && ( sparseProperties == rhs.sparseProperties );
# endif
}

bool operator!=( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif

public:
uint32_t apiVersion = {};
@@ -39506,6 +39731,45 @@ namespace VULKAN_HPP_NAMESPACE
{
}

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PhysicalDeviceToolProperties( std::string const & name_,
std::string const & version_ = {},
VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes_ = {},
std::string const & description_ = {},
std::string const & layer_ = {},
void * pNext_ = nullptr )
: pNext( pNext_ ), purposes( purposes_ )
{
VULKAN_HPP_ASSERT( name_.size() < VK_MAX_EXTENSION_NAME_SIZE );
# if defined( WIN32 )
strncpy_s( name, VK_MAX_EXTENSION_NAME_SIZE, name_.data(), name_.size() );
# else
strncpy( name, name_.data(), std::min<size_t>( VK_MAX_EXTENSION_NAME_SIZE, name_.size() ) );
# endif

VULKAN_HPP_ASSERT( version_.size() < VK_MAX_EXTENSION_NAME_SIZE );
# if defined( WIN32 )
strncpy_s( version, VK_MAX_EXTENSION_NAME_SIZE, version_.data(), version_.size() );
# else
strncpy( version, version_.data(), std::min<size_t>( VK_MAX_EXTENSION_NAME_SIZE, version_.size() ) );
# endif

VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE );
# if defined( WIN32 )
strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() );
# else
strncpy( description, description_.data(), std::min<size_t>( VK_MAX_DESCRIPTION_SIZE, description_.size() ) );
# endif

VULKAN_HPP_ASSERT( layer_.size() < VK_MAX_EXTENSION_NAME_SIZE );
# if defined( WIN32 )
strncpy_s( layer, VK_MAX_EXTENSION_NAME_SIZE, layer_.data(), layer_.size() );
# else
strncpy( layer, layer_.data(), std::min<size_t>( VK_MAX_EXTENSION_NAME_SIZE, layer_.size() ) );
# endif
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

PhysicalDeviceToolProperties & operator=( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

@@ -39544,23 +39808,37 @@ namespace VULKAN_HPP_NAMESPACE
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceToolProperties const & ) const = default;
#else
std::strong_ordering operator<=>( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
return cmp;
if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = strcmp( version, rhs.version ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = purposes <=> rhs.purposes; cmp != 0 )
return cmp;
if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = strcmp( layer, rhs.layer ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

return std::strong_ordering::equivalent;
}
#endif

bool operator==( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( name == rhs.name ) && ( version == rhs.version ) && ( purposes == rhs.purposes ) &&
( description == rhs.description ) && ( layer == rhs.layer );
# endif
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( strcmp( name, rhs.name ) == 0 ) && ( strcmp( version, rhs.version ) == 0 ) &&
( purposes == rhs.purposes ) && ( strcmp( description, rhs.description ) == 0 ) && ( strcmp( layer, rhs.layer ) == 0 );
}

bool operator!=( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif

public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolProperties;
@@ -41176,6 +41454,129 @@ namespace VULKAN_HPP_NAMESPACE
{
}

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
PhysicalDeviceVulkan12Properties(
VULKAN_HPP_NAMESPACE::DriverId driverID_,
std::string const & driverName_,
std::string const & driverInfo_ = {},
VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {},
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {},
uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {},
VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {},
VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {},
uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {},
uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {},
uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {},
uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {},
uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {},
uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {},
uint32_t maxPerStageUpdateAfterBindResources_ = {},
uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {},
uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {},
uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {},
uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {},
uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {},
uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {},
uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {},
uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {},
VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {},
VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {},
VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {},
VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {},
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {},
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {},
uint64_t maxTimelineSemaphoreValueDifference_ = {},
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {},
void * pNext_ = nullptr )
: pNext( pNext_ )
, driverID( driverID_ )
, conformanceVersion( conformanceVersion_ )
, denormBehaviorIndependence( denormBehaviorIndependence_ )
, roundingModeIndependence( roundingModeIndependence_ )
, shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ )
, shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ )
, shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ )
, shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ )
, shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ )
, shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ )
, shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ )
, shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ )
, shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ )
, shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ )
, shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ )
, shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ )
, shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ )
, shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ )
, shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ )
, maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ )
, shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ )
, shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ )
, shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ )
, shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ )
, shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ )
, robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ )
, quadDivergentImplicitLod( quadDivergentImplicitLod_ )
, maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ )
, maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ )
, maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ )
, maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ )
, maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ )
, maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ )
, maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ )
, maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ )
, maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ )
, maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ )
, maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ )
, maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ )
, maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ )
, maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ )
, maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ )
, supportedDepthResolveModes( supportedDepthResolveModes_ )
, supportedStencilResolveModes( supportedStencilResolveModes_ )
, independentResolveNone( independentResolveNone_ )
, independentResolve( independentResolve_ )
, filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ )
, filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
, maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
, framebufferIntegerColorSampleCounts( framebufferIntegerColorSampleCounts_ )
{
VULKAN_HPP_ASSERT( driverName_.size() < VK_MAX_DRIVER_NAME_SIZE );
# if defined( WIN32 )
strncpy_s( driverName, VK_MAX_DRIVER_NAME_SIZE, driverName_.data(), driverName_.size() );
# else
strncpy( driverName, driverName_.data(), std::min<size_t>( VK_MAX_DRIVER_NAME_SIZE, driverName_.size() ) );
# endif

VULKAN_HPP_ASSERT( driverInfo_.size() < VK_MAX_DRIVER_INFO_SIZE );
# if defined( WIN32 )
strncpy_s( driverInfo, VK_MAX_DRIVER_INFO_SIZE, driverInfo_.data(), driverInfo_.size() );
# else
strncpy( driverInfo, driverInfo_.data(), std::min<size_t>( VK_MAX_DRIVER_INFO_SIZE, driverInfo_.size() ) );
# endif
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

PhysicalDeviceVulkan12Properties & operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

@@ -41314,15 +41715,125 @@ namespace VULKAN_HPP_NAMESPACE
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceVulkan12Properties const & ) const = default;
#else
std::strong_ordering operator<=>( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
return cmp;
if ( auto cmp = driverID <=> rhs.driverID; cmp != 0 )
return cmp;
if ( auto cmp = strcmp( driverName, rhs.driverName ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = strcmp( driverInfo, rhs.driverInfo ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = conformanceVersion <=> rhs.conformanceVersion; cmp != 0 )
return cmp;
if ( auto cmp = denormBehaviorIndependence <=> rhs.denormBehaviorIndependence; cmp != 0 )
return cmp;
if ( auto cmp = roundingModeIndependence <=> rhs.roundingModeIndependence; cmp != 0 )
return cmp;
if ( auto cmp = shaderSignedZeroInfNanPreserveFloat16 <=> rhs.shaderSignedZeroInfNanPreserveFloat16; cmp != 0 )
return cmp;
if ( auto cmp = shaderSignedZeroInfNanPreserveFloat32 <=> rhs.shaderSignedZeroInfNanPreserveFloat32; cmp != 0 )
return cmp;
if ( auto cmp = shaderSignedZeroInfNanPreserveFloat64 <=> rhs.shaderSignedZeroInfNanPreserveFloat64; cmp != 0 )
return cmp;
if ( auto cmp = shaderDenormPreserveFloat16 <=> rhs.shaderDenormPreserveFloat16; cmp != 0 )
return cmp;
if ( auto cmp = shaderDenormPreserveFloat32 <=> rhs.shaderDenormPreserveFloat32; cmp != 0 )
return cmp;
if ( auto cmp = shaderDenormPreserveFloat64 <=> rhs.shaderDenormPreserveFloat64; cmp != 0 )
return cmp;
if ( auto cmp = shaderDenormFlushToZeroFloat16 <=> rhs.shaderDenormFlushToZeroFloat16; cmp != 0 )
return cmp;
if ( auto cmp = shaderDenormFlushToZeroFloat32 <=> rhs.shaderDenormFlushToZeroFloat32; cmp != 0 )
return cmp;
if ( auto cmp = shaderDenormFlushToZeroFloat64 <=> rhs.shaderDenormFlushToZeroFloat64; cmp != 0 )
return cmp;
if ( auto cmp = shaderRoundingModeRTEFloat16 <=> rhs.shaderRoundingModeRTEFloat16; cmp != 0 )
return cmp;
if ( auto cmp = shaderRoundingModeRTEFloat32 <=> rhs.shaderRoundingModeRTEFloat32; cmp != 0 )
return cmp;
if ( auto cmp = shaderRoundingModeRTEFloat64 <=> rhs.shaderRoundingModeRTEFloat64; cmp != 0 )
return cmp;
if ( auto cmp = shaderRoundingModeRTZFloat16 <=> rhs.shaderRoundingModeRTZFloat16; cmp != 0 )
return cmp;
if ( auto cmp = shaderRoundingModeRTZFloat32 <=> rhs.shaderRoundingModeRTZFloat32; cmp != 0 )
return cmp;
if ( auto cmp = shaderRoundingModeRTZFloat64 <=> rhs.shaderRoundingModeRTZFloat64; cmp != 0 )
return cmp;
if ( auto cmp = maxUpdateAfterBindDescriptorsInAllPools <=> rhs.maxUpdateAfterBindDescriptorsInAllPools; cmp != 0 )
return cmp;
if ( auto cmp = shaderUniformBufferArrayNonUniformIndexingNative <=> rhs.shaderUniformBufferArrayNonUniformIndexingNative; cmp != 0 )
return cmp;
if ( auto cmp = shaderSampledImageArrayNonUniformIndexingNative <=> rhs.shaderSampledImageArrayNonUniformIndexingNative; cmp != 0 )
return cmp;
if ( auto cmp = shaderStorageBufferArrayNonUniformIndexingNative <=> rhs.shaderStorageBufferArrayNonUniformIndexingNative; cmp != 0 )
return cmp;
if ( auto cmp = shaderStorageImageArrayNonUniformIndexingNative <=> rhs.shaderStorageImageArrayNonUniformIndexingNative; cmp != 0 )
return cmp;
if ( auto cmp = shaderInputAttachmentArrayNonUniformIndexingNative <=> rhs.shaderInputAttachmentArrayNonUniformIndexingNative; cmp != 0 )
return cmp;
if ( auto cmp = robustBufferAccessUpdateAfterBind <=> rhs.robustBufferAccessUpdateAfterBind; cmp != 0 )
return cmp;
if ( auto cmp = quadDivergentImplicitLod <=> rhs.quadDivergentImplicitLod; cmp != 0 )
return cmp;
if ( auto cmp = maxPerStageDescriptorUpdateAfterBindSamplers <=> rhs.maxPerStageDescriptorUpdateAfterBindSamplers; cmp != 0 )
return cmp;
if ( auto cmp = maxPerStageDescriptorUpdateAfterBindUniformBuffers <=> rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers; cmp != 0 )
return cmp;
if ( auto cmp = maxPerStageDescriptorUpdateAfterBindStorageBuffers <=> rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers; cmp != 0 )
return cmp;
if ( auto cmp = maxPerStageDescriptorUpdateAfterBindSampledImages <=> rhs.maxPerStageDescriptorUpdateAfterBindSampledImages; cmp != 0 )
return cmp;
if ( auto cmp = maxPerStageDescriptorUpdateAfterBindStorageImages <=> rhs.maxPerStageDescriptorUpdateAfterBindStorageImages; cmp != 0 )
return cmp;
if ( auto cmp = maxPerStageDescriptorUpdateAfterBindInputAttachments <=> rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments; cmp != 0 )
return cmp;
if ( auto cmp = maxPerStageUpdateAfterBindResources <=> rhs.maxPerStageUpdateAfterBindResources; cmp != 0 )
return cmp;
if ( auto cmp = maxDescriptorSetUpdateAfterBindSamplers <=> rhs.maxDescriptorSetUpdateAfterBindSamplers; cmp != 0 )
return cmp;
if ( auto cmp = maxDescriptorSetUpdateAfterBindUniformBuffers <=> rhs.maxDescriptorSetUpdateAfterBindUniformBuffers; cmp != 0 )
return cmp;
if ( auto cmp = maxDescriptorSetUpdateAfterBindUniformBuffersDynamic <=> rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; cmp != 0 )
return cmp;
if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageBuffers <=> rhs.maxDescriptorSetUpdateAfterBindStorageBuffers; cmp != 0 )
return cmp;
if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageBuffersDynamic <=> rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; cmp != 0 )
return cmp;
if ( auto cmp = maxDescriptorSetUpdateAfterBindSampledImages <=> rhs.maxDescriptorSetUpdateAfterBindSampledImages; cmp != 0 )
return cmp;
if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageImages <=> rhs.maxDescriptorSetUpdateAfterBindStorageImages; cmp != 0 )
return cmp;
if ( auto cmp = maxDescriptorSetUpdateAfterBindInputAttachments <=> rhs.maxDescriptorSetUpdateAfterBindInputAttachments; cmp != 0 )
return cmp;
if ( auto cmp = supportedDepthResolveModes <=> rhs.supportedDepthResolveModes; cmp != 0 )
return cmp;
if ( auto cmp = supportedStencilResolveModes <=> rhs.supportedStencilResolveModes; cmp != 0 )
return cmp;
if ( auto cmp = independentResolveNone <=> rhs.independentResolveNone; cmp != 0 )
return cmp;
if ( auto cmp = independentResolve <=> rhs.independentResolve; cmp != 0 )
return cmp;
if ( auto cmp = filterMinmaxSingleComponentFormats <=> rhs.filterMinmaxSingleComponentFormats; cmp != 0 )
return cmp;
if ( auto cmp = filterMinmaxImageComponentMapping <=> rhs.filterMinmaxImageComponentMapping; cmp != 0 )
return cmp;
if ( auto cmp = maxTimelineSemaphoreValueDifference <=> rhs.maxTimelineSemaphoreValueDifference; cmp != 0 )
return cmp;
if ( auto cmp = framebufferIntegerColorSampleCounts <=> rhs.framebufferIntegerColorSampleCounts; cmp != 0 )
return cmp;

return std::strong_ordering::equivalent;
}
#endif

bool operator==( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( driverName == rhs.driverName ) &&
( driverInfo == rhs.driverInfo ) && ( conformanceVersion == rhs.conformanceVersion ) &&
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( strcmp( driverName, rhs.driverName ) == 0 ) &&
( strcmp( driverInfo, rhs.driverInfo ) == 0 ) && ( conformanceVersion == rhs.conformanceVersion ) &&
( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && ( roundingModeIndependence == rhs.roundingModeIndependence ) &&
( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) &&
( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) &&
@@ -41362,14 +41873,12 @@ namespace VULKAN_HPP_NAMESPACE
( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ) &&
( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ) &&
( framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts );
# endif
}

bool operator!=( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif

public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties;