Update Vulkan-Headers to v1.3.246 (#1551)

Andreas Süßenbach 2023-04-03 13:29:44 +02:00 committed by GitHub
parent 35ab2bb711
commit 678295aa75
15 changed files with 1999 additions and 108 deletions

@@ -1 +1 @@
Subproject commit 9b9fd871b08110cd8f0b74e721b03213d9cc3081
Subproject commit 63af1cf1ee906ba4dcd5a324bdd0201d4f4bfd12


@@ -114,7 +114,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h
# include <span>
#endif
static_assert( VK_HEADER_VERSION == 245, "Wrong VK_HEADER_VERSION!" );
static_assert( VK_HEADER_VERSION == 246, "Wrong VK_HEADER_VERSION!" );
// 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default.
// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
@@ -5854,6 +5854,35 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdOpticalFlowExecuteNV( commandBuffer, session, pExecuteInfo );
}
//=== VK_EXT_shader_object ===
VkResult vkCreateShadersEXT( VkDevice device,
uint32_t createInfoCount,
const VkShaderCreateInfoEXT * pCreateInfos,
const VkAllocationCallbacks * pAllocator,
VkShaderEXT * pShaders ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateShadersEXT( device, createInfoCount, pCreateInfos, pAllocator, pShaders );
}
void vkDestroyShaderEXT( VkDevice device, VkShaderEXT shader, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyShaderEXT( device, shader, pAllocator );
}
VkResult vkGetShaderBinaryDataEXT( VkDevice device, VkShaderEXT shader, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetShaderBinaryDataEXT( device, shader, pDataSize, pData );
}
void vkCmdBindShadersEXT( VkCommandBuffer commandBuffer,
uint32_t stageCount,
const VkShaderStageFlagBits * pStages,
const VkShaderEXT * pShaders ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBindShadersEXT( commandBuffer, stageCount, pStages, pShaders );
}
//=== VK_QCOM_tile_properties ===
VkResult vkGetFramebufferTilePropertiesQCOM( VkDevice device,
@@ -6543,6 +6572,14 @@ namespace VULKAN_HPP_NAMESPACE
CompressionExhaustedEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorCompressionExhaustedEXT ), message ) {}
};
class IncompatibleShaderBinaryEXTError : public SystemError
{
public:
IncompatibleShaderBinaryEXTError( std::string const & message ) : SystemError( make_error_code( Result::eErrorIncompatibleShaderBinaryEXT ), message ) {}
IncompatibleShaderBinaryEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorIncompatibleShaderBinaryEXT ), message ) {}
};
namespace
{
[[noreturn]] void throwResultException( Result result, char const * message )
@@ -6587,6 +6624,7 @@ namespace VULKAN_HPP_NAMESPACE
case Result::eErrorInvalidVideoStdParametersKHR: throw InvalidVideoStdParametersKHRError( message );
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
case Result::eErrorCompressionExhaustedEXT: throw CompressionExhaustedEXTError( message );
case Result::eErrorIncompatibleShaderBinaryEXT: throw IncompatibleShaderBinaryEXTError( message );
default: throw SystemError( make_error_code( result ), message );
}
}
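
A minimal sketch of how the new result code reaches callers (assumptions: exceptions enabled, the default `vk` namespace, an existing vk::Device `device`, and `binaryCreateInfo` / `spirvCreateInfo` filled elsewhere with codeType eBinary / eSpirv): a stale cached shader binary can be detected via the new exception type and rebuilt from SPIR-V.

try
{
  vk::ShaderEXT shader = device.createShaderEXT( binaryCreateInfo );  // codeType == eBinary (cached blob)
  useShader( shader );                                                // hypothetical helper
}
catch ( vk::IncompatibleShaderBinaryEXTError const & )
{
  // the cached binary no longer matches this driver: recreate the shader from SPIR-V
  vk::ShaderEXT shader = device.createShaderEXT( spirvCreateInfo );   // codeType == eSpirv
  useShader( shader );
}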
@@ -7971,6 +8009,15 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
struct StructExtends<PipelineShaderStageRequiredSubgroupSizeCreateInfo, ShaderCreateInfoEXT>
{
enum
{
value = true
};
};
template <>
struct StructExtends<PhysicalDeviceInlineUniformBlockFeatures, PhysicalDeviceFeatures2>
{
@@ -12257,6 +12304,34 @@ namespace VULKAN_HPP_NAMESPACE
};
};
//=== VK_EXT_shader_tile_image ===
template <>
struct StructExtends<PhysicalDeviceShaderTileImageFeaturesEXT, PhysicalDeviceFeatures2>
{
enum
{
value = true
};
};
template <>
struct StructExtends<PhysicalDeviceShaderTileImageFeaturesEXT, DeviceCreateInfo>
{
enum
{
value = true
};
};
template <>
struct StructExtends<PhysicalDeviceShaderTileImagePropertiesEXT, PhysicalDeviceProperties2>
{
enum
{
value = true
};
};
//=== VK_EXT_opacity_micromap ===
template <>
struct StructExtends<PhysicalDeviceOpacityMicromapFeaturesEXT, PhysicalDeviceFeatures2>
@@ -12913,6 +12988,34 @@ namespace VULKAN_HPP_NAMESPACE
};
};
//=== VK_EXT_shader_object ===
template <>
struct StructExtends<PhysicalDeviceShaderObjectFeaturesEXT, PhysicalDeviceFeatures2>
{
enum
{
value = true
};
};
template <>
struct StructExtends<PhysicalDeviceShaderObjectFeaturesEXT, DeviceCreateInfo>
{
enum
{
value = true
};
};
template <>
struct StructExtends<PhysicalDeviceShaderObjectPropertiesEXT, PhysicalDeviceProperties2>
{
enum
{
value = true
};
};
//=== VK_QCOM_tile_properties ===
template <>
struct StructExtends<PhysicalDeviceTilePropertiesFeaturesQCOM, PhysicalDeviceFeatures2>
@@ -14263,6 +14366,12 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0;
PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0;
//=== VK_EXT_shader_object ===
PFN_vkCreateShadersEXT vkCreateShadersEXT = 0;
PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0;
PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0;
PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0;
//=== VK_QCOM_tile_properties ===
PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0;
PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0;
@@ -15545,6 +15654,12 @@ namespace VULKAN_HPP_NAMESPACE
vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetInstanceProcAddr( instance, "vkBindOpticalFlowSessionImageNV" ) );
vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetInstanceProcAddr( instance, "vkCmdOpticalFlowExecuteNV" ) );
//=== VK_EXT_shader_object ===
vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetInstanceProcAddr( instance, "vkCreateShadersEXT" ) );
vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetInstanceProcAddr( instance, "vkDestroyShaderEXT" ) );
vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetInstanceProcAddr( instance, "vkGetShaderBinaryDataEXT" ) );
vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindShadersEXT" ) );
//=== VK_QCOM_tile_properties ===
vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetFramebufferTilePropertiesQCOM" ) );
vkGetDynamicRenderingTilePropertiesQCOM =
@@ -16496,6 +16611,12 @@ namespace VULKAN_HPP_NAMESPACE
vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) );
vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) );
//=== VK_EXT_shader_object ===
vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) );
vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) );
vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) );
vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) );
//=== VK_QCOM_tile_properties ===
vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) );
vkGetDynamicRenderingTilePropertiesQCOM =


@@ -80,7 +80,8 @@ namespace VULKAN_HPP_NAMESPACE
#if defined( VK_ENABLE_BETA_EXTENSIONS )
eErrorInvalidVideoStdParametersKHR = VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
eErrorCompressionExhaustedEXT = VK_ERROR_COMPRESSION_EXHAUSTED_EXT
eErrorCompressionExhaustedEXT = VK_ERROR_COMPRESSION_EXHAUSTED_EXT,
eErrorIncompatibleShaderBinaryEXT = VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT
};
enum class StructureType
@@ -989,6 +990,8 @@ namespace VULKAN_HPP_NAMESPACE
ePhysicalDeviceMultiDrawFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT,
ePhysicalDeviceMultiDrawPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT,
ePhysicalDeviceImage2DViewOf3DFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT,
ePhysicalDeviceShaderTileImageFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT,
ePhysicalDeviceShaderTileImagePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_PROPERTIES_EXT,
eMicromapBuildInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT,
eMicromapVersionInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT,
eCopyMicromapInfoEXT = VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT,
@@ -1055,6 +1058,10 @@ namespace VULKAN_HPP_NAMESPACE
eOpticalFlowSessionCreatePrivateDataInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV,
ePhysicalDeviceLegacyDitheringFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT,
ePhysicalDevicePipelineProtectedAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT,
ePhysicalDeviceShaderObjectFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT,
ePhysicalDeviceShaderObjectPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT,
eShaderCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT,
eShaderRequiredSubgroupSizeCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT,
ePhysicalDeviceTilePropertiesFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM,
eTilePropertiesQCOM = VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM,
ePhysicalDeviceAmigoProfilingFeaturesSEC = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC,
@@ -1130,7 +1137,8 @@ namespace VULKAN_HPP_NAMESPACE
eBufferCollectionFUCHSIA = VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA,
#endif /*VK_USE_PLATFORM_FUCHSIA*/
eMicromapEXT = VK_OBJECT_TYPE_MICROMAP_EXT,
eOpticalFlowSessionNV = VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV
eOpticalFlowSessionNV = VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV,
eShaderEXT = VK_OBJECT_TYPE_SHADER_EXT
};
enum class VendorId
@@ -6471,6 +6479,37 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowExecuteFlagsNV allFlags = OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints;
};
//=== VK_EXT_shader_object ===
enum class ShaderCreateFlagBitsEXT : VkShaderCreateFlagsEXT
{
eLinkStage = VK_SHADER_CREATE_LINK_STAGE_BIT_EXT,
eAllowVaryingSubgroupSize = VK_SHADER_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT,
eRequireFullSubgroups = VK_SHADER_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT,
eNoTaskShader = VK_SHADER_CREATE_NO_TASK_SHADER_BIT_EXT,
eDispatchBase = VK_SHADER_CREATE_DISPATCH_BASE_BIT_EXT,
eFragmentShadingRateAttachment = VK_SHADER_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_EXT,
eFragmentDensityMapAttachment = VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT
};
using ShaderCreateFlagsEXT = Flags<ShaderCreateFlagBitsEXT>;
template <>
struct FlagTraits<ShaderCreateFlagBitsEXT>
{
static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderCreateFlagsEXT allFlags =
ShaderCreateFlagBitsEXT::eLinkStage | ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize | ShaderCreateFlagBitsEXT::eRequireFullSubgroups |
ShaderCreateFlagBitsEXT::eNoTaskShader | ShaderCreateFlagBitsEXT::eDispatchBase | ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment |
ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment;
};
enum class ShaderCodeTypeEXT
{
eBinary = VK_SHADER_CODE_TYPE_BINARY_EXT,
eSpirv = VK_SHADER_CODE_TYPE_SPIRV_EXT
};
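
A minimal sketch of how these enums are consumed through vk::ShaderCreateInfoEXT (assumptions: the default `vk` namespace and a std::vector<uint32_t> `spirvCode` produced elsewhere); the members used here match the ShaderCreateInfoEXT hash added further down in this change.

vk::ShaderCreateInfoEXT vertexInfo;
vertexInfo.flags     = vk::ShaderCreateFlagBitsEXT::eLinkStage;  // created together with its fragment stage
vertexInfo.stage     = vk::ShaderStageFlagBits::eVertex;
vertexInfo.nextStage = vk::ShaderStageFlagBits::eFragment;
vertexInfo.codeType  = vk::ShaderCodeTypeEXT::eSpirv;            // eBinary would take a cached blob instead
vertexInfo.codeSize  = spirvCode.size() * sizeof( uint32_t );
vertexInfo.pCode     = spirvCode.data();
vertexInfo.pName     = "main";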
//=== VK_NV_ray_tracing_invocation_reorder ===
enum class RayTracingInvocationReorderModeNV


@@ -223,7 +223,8 @@ namespace VULKAN_HPP_NAMESPACE
( name == "VK_EXT_pipeline_properties" ) || ( name == "VK_EXT_multisampled_render_to_single_sampled" ) ||
( name == "VK_EXT_extended_dynamic_state2" ) || ( name == "VK_EXT_color_write_enable" ) || ( name == "VK_EXT_primitives_generated_query" ) ||
( name == "VK_KHR_ray_tracing_maintenance1" ) || ( name == "VK_EXT_global_priority_query" ) || ( name == "VK_EXT_image_view_min_lod" ) ||
( name == "VK_EXT_multi_draw" ) || ( name == "VK_EXT_image_2d_view_of_3d" ) || ( name == "VK_EXT_opacity_micromap" ) ||
( name == "VK_EXT_multi_draw" ) || ( name == "VK_EXT_image_2d_view_of_3d" ) || ( name == "VK_EXT_shader_tile_image" ) ||
( name == "VK_EXT_opacity_micromap" ) ||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
( name == "VK_NV_displacement_micromap" ) ||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
@@ -235,8 +236,8 @@ namespace VULKAN_HPP_NAMESPACE
( name == "VK_EXT_image_compression_control_swapchain" ) || ( name == "VK_QCOM_image_processing" ) || ( name == "VK_EXT_extended_dynamic_state3" ) ||
( name == "VK_EXT_subpass_merge_feedback" ) || ( name == "VK_EXT_shader_module_identifier" ) ||
( name == "VK_EXT_rasterization_order_attachment_access" ) || ( name == "VK_NV_optical_flow" ) || ( name == "VK_EXT_legacy_dithering" ) ||
( name == "VK_EXT_pipeline_protected_access" ) || ( name == "VK_QCOM_tile_properties" ) || ( name == "VK_SEC_amigo_profiling" ) ||
( name == "VK_QCOM_multiview_per_view_viewports" ) || ( name == "VK_NV_ray_tracing_invocation_reorder" ) ||
( name == "VK_EXT_pipeline_protected_access" ) || ( name == "VK_EXT_shader_object" ) || ( name == "VK_QCOM_tile_properties" ) ||
( name == "VK_SEC_amigo_profiling" ) || ( name == "VK_QCOM_multiview_per_view_viewports" ) || ( name == "VK_NV_ray_tracing_invocation_reorder" ) ||
( name == "VK_EXT_mutable_descriptor_type" ) || ( name == "VK_ARM_shader_core_builtins" ) || ( name == "VK_EXT_pipeline_library_group_handles" ) ||
( name == "VK_QCOM_multiview_per_view_render_areas" );
}


@@ -21517,6 +21517,317 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_EXT_shader_object ===
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShadersEXT( uint32_t createInfoCount,
const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT * pCreateInfos,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateShadersEXT( m_device,
createInfoCount,
reinterpret_cast<const VkShaderCreateInfoEXT *>( pCreateInfos ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkShaderEXT *>( pShaders ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename ShaderEXTAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>::type
Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator> shaders( createInfos.size() );
VkResult result =
d.vkCreateShadersEXT( m_device,
createInfos.size(),
reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkShaderEXT *>( shaders.data() ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), shaders );
}
template <typename ShaderEXTAllocator,
typename Dispatch,
typename B0,
typename std::enable_if<std::is_same<typename B0::value_type, ShaderEXT>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>::type
Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
ShaderEXTAllocator & shaderEXTAllocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator> shaders( createInfos.size(), shaderEXTAllocator );
VkResult result =
d.vkCreateShadersEXT( m_device,
createInfos.size(),
reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkShaderEXT *>( shaders.data() ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), shaders );
}
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderEXT>::type
Device::createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::ShaderEXT shader;
VkResult result =
d.vkCreateShadersEXT( m_device,
1,
reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkShaderEXT *>( &shader ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXT" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), shader );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch, typename ShaderEXTAllocator>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>::type
Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT> shaders( createInfos.size() );
VkResult result =
d.vkCreateShadersEXT( m_device,
createInfos.size(),
reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkShaderEXT *>( shaders.data() ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique" );
std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator> uniqueShaders;
uniqueShaders.reserve( createInfos.size() );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
for ( auto const & shader : shaders )
{
uniqueShaders.push_back( UniqueHandle<ShaderEXT, Dispatch>( shader, deleter ) );
}
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueShaders ) );
}
template <typename Dispatch,
typename ShaderEXTAllocator,
typename B0,
typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<ShaderEXT, Dispatch>>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>::type
Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
ShaderEXTAllocator & shaderEXTAllocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT> shaders( createInfos.size() );
VkResult result =
d.vkCreateShadersEXT( m_device,
createInfos.size(),
reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkShaderEXT *>( shaders.data() ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique" );
std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator> uniqueShaders( shaderEXTAllocator );
uniqueShaders.reserve( createInfos.size() );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
for ( auto const & shader : shaders )
{
uniqueShaders.push_back( UniqueHandle<ShaderEXT, Dispatch>( shader, deleter ) );
}
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueShaders ) );
}
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>::type
Device::createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::ShaderEXT shader;
VkResult result =
d.vkCreateShadersEXT( m_device,
1,
reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkShaderEXT *>( &shader ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXTUnique" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>( shader, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
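
A short sketch of the smart-handle variant added above (assumptions: exceptions and smart handles enabled, an existing vk::Device `device`, and a filled vk::ShaderCreateInfoEXT `createInfo`); the shader is destroyed automatically when the handle goes out of scope.

vk::UniqueShaderEXT shader = device.createShaderEXTUnique( createInfo );
// ... record work that uses *shader ...
// vkDestroyShaderEXT is invoked by the UniqueHandle's deleter at scope exit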
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyShaderEXT( m_device, static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyShaderEXT( m_device,
static_cast<VkShaderEXT>( shader ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyShaderEXT( m_device, static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyShaderEXT( m_device,
static_cast<VkShaderEXT>( shader ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, size_t * pDataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), pDataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Uint8_tAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<uint8_t, Uint8_tAllocator> data;
size_t dataSize;
VkResult result;
do
{
result = d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, nullptr );
if ( ( result == VK_SUCCESS ) && dataSize )
{
data.resize( dataSize );
result = d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, reinterpret_cast<void *>( data.data() ) );
}
} while ( result == VK_INCOMPLETE );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" );
VULKAN_HPP_ASSERT( dataSize <= data.size() );
if ( dataSize < data.size() )
{
data.resize( dataSize );
}
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
template <typename Uint8_tAllocator,
typename Dispatch,
typename B1,
typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
size_t dataSize;
VkResult result;
do
{
result = d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, nullptr );
if ( ( result == VK_SUCCESS ) && dataSize )
{
data.resize( dataSize );
result = d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, reinterpret_cast<void *>( data.data() ) );
}
} while ( result == VK_INCOMPLETE );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" );
VULKAN_HPP_ASSERT( dataSize <= data.size() );
if ( dataSize < data.size() )
{
data.resize( dataSize );
}
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( uint32_t stageCount,
const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits * pStages,
const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdBindShadersEXT(
m_commandBuffer, stageCount, reinterpret_cast<const VkShaderStageFlagBits *>( pStages ), reinterpret_cast<const VkShaderEXT *>( pShaders ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( stages.size() == shaders.size() );
# else
if ( stages.size() != shaders.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindShadersEXT: stages.size() != shaders.size()" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
d.vkCmdBindShadersEXT( m_commandBuffer,
stages.size(),
reinterpret_cast<const VkShaderStageFlagBits *>( stages.data() ),
reinterpret_cast<const VkShaderEXT *>( shaders.data() ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
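
A minimal enhanced-mode sketch tying the new entry points together (assumptions: exceptions enabled, an existing vk::Device `device`, a vk::CommandBuffer `cmd` in the recording state, and `vertexInfo` / `fragmentInfo` create infos flagged with eLinkStage).

std::vector<vk::ShaderEXT> shaders = device.createShadersEXT( { vertexInfo, fragmentInfo } );  // throws on failure
cmd.bindShadersEXT( { vk::ShaderStageFlagBits::eVertex, vk::ShaderStageFlagBits::eFragment },
                    { shaders[0], shaders[1] } );
std::vector<uint8_t> blob = device.getShaderBinaryDataEXT( shaders[0] );  // binary for an on-disk cache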
//=== VK_QCOM_tile_properties ===
template <typename Dispatch>


@@ -433,6 +433,7 @@ namespace VULKAN_HPP_NAMESPACE
using PhysicalDeviceSubgroupSizeControlPropertiesEXT = PhysicalDeviceSubgroupSizeControlProperties;
struct PipelineShaderStageRequiredSubgroupSizeCreateInfo;
using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
using ShaderRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
struct PhysicalDeviceInlineUniformBlockFeatures;
using PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures;
struct PhysicalDeviceInlineUniformBlockProperties;
@@ -1433,6 +1434,10 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_image_2d_view_of_3d ===
struct PhysicalDeviceImage2DViewOf3DFeaturesEXT;
//=== VK_EXT_shader_tile_image ===
struct PhysicalDeviceShaderTileImageFeaturesEXT;
struct PhysicalDeviceShaderTileImagePropertiesEXT;
//=== VK_EXT_opacity_micromap ===
struct MicromapBuildInfoEXT;
struct MicromapUsageEXT;
@@ -1553,6 +1558,11 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_pipeline_protected_access ===
struct PhysicalDevicePipelineProtectedAccessFeaturesEXT;
//=== VK_EXT_shader_object ===
struct PhysicalDeviceShaderObjectFeaturesEXT;
struct PhysicalDeviceShaderObjectPropertiesEXT;
struct ShaderCreateInfoEXT;
//=== VK_QCOM_tile_properties ===
struct PhysicalDeviceTilePropertiesFeaturesQCOM;
struct TilePropertiesQCOM;
@@ -1678,6 +1688,9 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_optical_flow ===
class OpticalFlowSessionNV;
//=== VK_EXT_shader_object ===
class ShaderEXT;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
//======================
//=== UNIQUE HANDLEs ===
@@ -2082,6 +2095,16 @@ namespace VULKAN_HPP_NAMESPACE
};
using UniqueOpticalFlowSessionNV = UniqueHandle<OpticalFlowSessionNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
//=== VK_EXT_shader_object ===
template <typename Dispatch>
class UniqueHandleTraits<ShaderEXT, Dispatch>
{
public:
using deleter = ObjectDestroy<Device, Dispatch>;
};
using UniqueShaderEXT = UniqueHandle<ShaderEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
//===============
@@ -3309,6 +3332,87 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
class ShaderEXT
{
public:
using CType = VkShaderEXT;
using NativeType = VkShaderEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
VULKAN_HPP_CONSTEXPR ShaderEXT() = default;
VULKAN_HPP_CONSTEXPR ShaderEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
VULKAN_HPP_TYPESAFE_EXPLICIT ShaderEXT( VkShaderEXT shaderEXT ) VULKAN_HPP_NOEXCEPT : m_shaderEXT( shaderEXT ) {}
#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
ShaderEXT & operator=( VkShaderEXT shaderEXT ) VULKAN_HPP_NOEXCEPT
{
m_shaderEXT = shaderEXT;
return *this;
}
#endif
ShaderEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
m_shaderEXT = {};
return *this;
}
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( ShaderEXT const & ) const = default;
#else
bool operator==( ShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return m_shaderEXT == rhs.m_shaderEXT;
}
bool operator!=( ShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return m_shaderEXT != rhs.m_shaderEXT;
}
bool operator<( ShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return m_shaderEXT < rhs.m_shaderEXT;
}
#endif
VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderEXT() const VULKAN_HPP_NOEXCEPT
{
return m_shaderEXT;
}
explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
return m_shaderEXT != VK_NULL_HANDLE;
}
bool operator!() const VULKAN_HPP_NOEXCEPT
{
return m_shaderEXT == VK_NULL_HANDLE;
}
private:
VkShaderEXT m_shaderEXT = {};
};
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eShaderEXT>
{
using Type = VULKAN_HPP_NAMESPACE::ShaderEXT;
};
template <>
struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ShaderEXT>
{
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
class Image
{
public:
@@ -6093,6 +6197,20 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_EXT_shader_object ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void bindShadersEXT( uint32_t stageCount,
const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits * pStages,
const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT
{
return m_commandBuffer;
@@ -12637,6 +12755,97 @@ namespace VULKAN_HPP_NAMESPACE
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
//=== VK_EXT_shader_object ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result createShadersEXT( uint32_t createInfoCount,
const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT * pCreateInfos,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename ShaderEXTAllocator = std::allocator<VULKAN_HPP_NAMESPACE::ShaderEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>::type
createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename ShaderEXTAllocator = std::allocator<VULKAN_HPP_NAMESPACE::ShaderEXT>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
typename B0 = ShaderEXTAllocator,
typename std::enable_if<std::is_same<typename B0::value_type, ShaderEXT>::value, int>::type = 0>
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>::type
createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
ShaderEXTAllocator & shaderEXTAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderEXT>::type
createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
typename ShaderEXTAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>>
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>::type
createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
typename ShaderEXTAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>,
typename B0 = ShaderEXTAllocator,
typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<ShaderEXT, Dispatch>>::value, int>::type = 0>
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>::type
createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
ShaderEXTAllocator & shaderEXTAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>::type
createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
size_t * pDataSize,
void * pData,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Uint8_tAllocator = std::allocator<uint8_t>,
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
typename B1 = Uint8_tAllocator,
typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type = 0>
VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderBinaryDataEXT(
VULKAN_HPP_NAMESPACE::ShaderEXT shader, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_QCOM_tile_properties ===
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>


@@ -492,6 +492,17 @@ namespace std
}
};
//=== VK_EXT_shader_object ===
template <>
struct hash<VULKAN_HPP_NAMESPACE::ShaderEXT>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderEXT const & shaderEXT ) const VULKAN_HPP_NOEXCEPT
{
return std::hash<VkShaderEXT>{}( static_cast<VkShaderEXT>( shaderEXT ) );
}
};
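
With this specialization, ShaderEXT handles can key the standard unordered containers; a small sketch (assumptions: the default `vk` namespace and an existing vk::ShaderEXT `shader`).

std::unordered_map<vk::ShaderEXT, std::string> shaderNames;  // e.g. handle -> debug name
shaderNames[shader] = "tonemap_fragment";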
#if 14 <= VULKAN_HPP_CPP_VERSION
//======================================
//=== HASH structures for structures ===
@@ -10074,6 +10085,38 @@ namespace std
}
};
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT>
{
std::size_t
operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT const & physicalDeviceShaderObjectFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectFeaturesEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectFeaturesEXT.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectFeaturesEXT.shaderObject );
return seed;
}
};
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT>
{
std::size_t
operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT const & physicalDeviceShaderObjectPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.pNext );
for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
{
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.shaderBinaryUUID[i] );
}
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.shaderBinaryVersion );
return seed;
}
};
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV>
{
@@ -10145,6 +10188,38 @@ namespace std
}
};
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT>
{
std::size_t
operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT const & physicalDeviceShaderTileImageFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.shaderTileImageColorReadAccess );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.shaderTileImageDepthReadAccess );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.shaderTileImageStencilReadAccess );
return seed;
}
};
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT const & physicalDeviceShaderTileImagePropertiesEXT ) const
VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.pNext );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.shaderTileImageCoherentReadAccelerated );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.shaderTileImageReadSampleFromPixelRateInvocation );
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.shaderTileImageReadFromHelperInvocation );
return seed;
}
};
template <>
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV>
{
@@ -12680,6 +12755,33 @@ namespace std
}
};
template <>
struct hash<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT>
{
std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & shaderCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
{
std::size_t seed = 0;
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.sType );
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pNext );
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.flags );
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.stage );
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.nextStage );
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.codeType );
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.codeSize );
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pCode );
for ( const char * p = shaderCreateInfoEXT.pName; *p != '\0'; ++p )
{
VULKAN_HPP_HASH_COMBINE( seed, *p );
}
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.setLayoutCount );
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pSetLayouts );
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pushConstantRangeCount );
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pPushConstantRanges );
VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pSpecializationInfo );
return seed;
}
};
template <>
struct hash<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo>
{


@@ -1593,6 +1593,12 @@ namespace VULKAN_HPP_NAMESPACE
vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) );
vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) );
//=== VK_EXT_shader_object ===
vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) );
vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) );
vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) );
vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) );
//=== VK_QCOM_tile_properties ===
vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) );
vkGetDynamicRenderingTilePropertiesQCOM =
@@ -2384,6 +2390,12 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0;
PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0;
//=== VK_EXT_shader_object ===
PFN_vkCreateShadersEXT vkCreateShadersEXT = 0;
PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0;
PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0;
PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0;
//=== VK_QCOM_tile_properties ===
PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0;
PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0;
@@ -2480,6 +2492,9 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_NV_optical_flow ===
class OpticalFlowSessionNV;
//=== VK_EXT_shader_object ===
class ShaderEXT;
//====================
//=== RAII HANDLES ===
//====================
@@ -4139,6 +4154,16 @@ namespace VULKAN_HPP_NAMESPACE
createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
//=== VK_EXT_shader_object ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::ShaderEXT>
createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ShaderEXT
createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
//=== VK_QCOM_tile_properties ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::TilePropertiesQCOM
@@ -5887,6 +5912,11 @@ namespace VULKAN_HPP_NAMESPACE
void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_shader_object ===
void bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders ) const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {};
@@ -10269,6 +10299,172 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
class ShaderEXT
{
public:
using CType = VkShaderEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
ShaderEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
VULKAN_HPP_NAMESPACE::Result result =
static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateShadersEXT( static_cast<VkDevice>( *device ),
1,
reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
reinterpret_cast<VkShaderEXT *>( &m_shader ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
throwResultException( result, "vkCreateShadersEXT" );
}
}
ShaderEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VkShaderEXT shader,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( *device )
, m_shader( shader )
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
, m_dispatcher( device.getDispatcher() )
{
}
ShaderEXT( std::nullptr_t ) {}
~ShaderEXT()
{
clear();
}
ShaderEXT() = delete;
ShaderEXT( ShaderEXT const & ) = delete;
ShaderEXT( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_shader( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shader, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
ShaderEXT & operator=( ShaderEXT const & ) = delete;
ShaderEXT & operator=( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
clear();
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
m_shader = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shader, {} );
m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
}
return *this;
}
VULKAN_HPP_NAMESPACE::ShaderEXT const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_shader;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_shader )
{
getDispatcher()->vkDestroyShaderEXT(
static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( m_shader ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_shader = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::ShaderEXT release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_shader, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_shader, rhs.m_shader );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_EXT_shader_object ===
VULKAN_HPP_NODISCARD std::vector<uint8_t> getBinaryData() const;
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::ShaderEXT m_shader = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
class ShaderEXTs : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT>
{
public:
ShaderEXTs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
std::vector<VkShaderEXT> shaders( createInfos.size() );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateShadersEXT(
static_cast<VkDevice>( *device ),
createInfos.size(),
reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
shaders.data() ) );
if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
this->reserve( createInfos.size() );
for ( auto const & shaderEXT : shaders )
{
this->emplace_back( device, shaderEXT, allocator );
}
}
else
{
throwResultException( result, "vkCreateShadersEXT" );
}
}
ShaderEXTs( std::nullptr_t ) {}
ShaderEXTs() = delete;
ShaderEXTs( ShaderEXTs const & ) = delete;
ShaderEXTs( ShaderEXTs && rhs ) = default;
ShaderEXTs & operator=( ShaderEXTs const & ) = delete;
ShaderEXTs & operator=( ShaderEXTs && rhs ) = default;
};
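
A minimal RAII-mode sketch (assumptions: an existing vk::raii::Device `device`, a vk::raii::CommandBuffer `cmd`, and `vertexInfo` / `fragmentInfo` create infos as in the non-RAII examples); destruction is handled by the returned wrappers.

std::vector<vk::raii::ShaderEXT> shaders = device.createShadersEXT( { vertexInfo, fragmentInfo } );
std::vector<uint8_t> cached = shaders[0].getBinaryData();  // per-shader blob for caching
cmd.bindShadersEXT( { vk::ShaderStageFlagBits::eVertex, vk::ShaderStageFlagBits::eFragment },
                    { *shaders[0], *shaders[1] } );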
class ShaderModule
{
public:
@@ -13598,14 +13794,16 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullMode && "Function <vkCmdSetCullMode> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullMode &&
"Function <vkCmdSetCullMode> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetCullMode( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFace && "Function <vkCmdSetFrontFace> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFace &&
"Function <vkCmdSetFrontFace> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetFrontFace( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) );
}
@@ -13613,7 +13811,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopology &&
"Function <vkCmdSetPrimitiveTopology> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetPrimitiveTopology> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetPrimitiveTopology( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) );
}
@@ -13622,7 +13820,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCount &&
"Function <vkCmdSetViewportWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetViewportWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetViewportWithCount(
static_cast<VkCommandBuffer>( m_commandBuffer ), viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
@ -13632,7 +13830,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCount &&
"Function <vkCmdSetScissorWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetScissorWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetScissorWithCount(
static_cast<VkCommandBuffer>( m_commandBuffer ), scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
@ -13645,7 +13843,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2 &&
"Function <vkCmdBindVertexBuffers2> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdBindVertexBuffers2> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
if ( buffers.size() != offsets.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" );
@ -13671,7 +13869,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnable &&
"Function <vkCmdSetDepthTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) );
}
@ -13679,7 +13877,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnable &&
"Function <vkCmdSetDepthWriteEnable> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthWriteEnable> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthWriteEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) );
}
@ -13687,7 +13885,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOp &&
"Function <vkCmdSetDepthCompareOp> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthCompareOp> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthCompareOp( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) );
}
@ -13695,7 +13893,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnable &&
"Function <vkCmdSetDepthBoundsTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthBoundsTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthBoundsTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) );
}
@ -13703,7 +13901,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnable &&
"Function <vkCmdSetStencilTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetStencilTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetStencilTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) );
}
@ -13714,7 +13912,8 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOp && "Function <vkCmdSetStencilOp> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOp &&
"Function <vkCmdSetStencilOp> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetStencilOp( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkStencilFaceFlags>( faceMask ),
@ -13727,7 +13926,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizerDiscardEnable &&
"Function <vkCmdSetRasterizerDiscardEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_VERSION_1_3>" );
"Function <vkCmdSetRasterizerDiscardEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetRasterizerDiscardEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) );
}
@ -13735,7 +13934,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBiasEnable &&
"Function <vkCmdSetDepthBiasEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthBiasEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthBiasEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) );
}
@ -13743,7 +13942,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveRestartEnable &&
"Function <vkCmdSetPrimitiveRestartEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_VERSION_1_3>" );
"Function <vkCmdSetPrimitiveRestartEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetPrimitiveRestartEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) );
}
@ -17779,7 +17978,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullModeEXT &&
"Function <vkCmdSetCullModeEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetCullModeEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetCullModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) );
}
@ -17787,7 +17986,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFaceEXT &&
"Function <vkCmdSetFrontFaceEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetFrontFaceEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetFrontFaceEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) );
}
@ -17795,7 +17994,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopologyEXT &&
"Function <vkCmdSetPrimitiveTopologyEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetPrimitiveTopologyEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetPrimitiveTopologyEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) );
}
@ -17804,7 +18003,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCountEXT &&
"Function <vkCmdSetViewportWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetViewportWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetViewportWithCountEXT(
static_cast<VkCommandBuffer>( m_commandBuffer ), viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
@ -17814,7 +18013,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCountEXT &&
"Function <vkCmdSetScissorWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetScissorWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetScissorWithCountEXT(
static_cast<VkCommandBuffer>( m_commandBuffer ), scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
@ -17828,7 +18027,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2EXT &&
"Function <vkCmdBindVertexBuffers2EXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdBindVertexBuffers2EXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
if ( buffers.size() != offsets.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
@ -17854,7 +18053,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnableEXT &&
"Function <vkCmdSetDepthTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) );
}
@ -17862,7 +18061,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnableEXT &&
"Function <vkCmdSetDepthWriteEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthWriteEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthWriteEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) );
}
@ -17870,7 +18069,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOpEXT &&
"Function <vkCmdSetDepthCompareOpEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthCompareOpEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthCompareOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) );
}
@ -17878,7 +18077,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT &&
"Function <vkCmdSetDepthBoundsTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthBoundsTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) );
}
@ -17886,7 +18085,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnableEXT &&
"Function <vkCmdSetStencilTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetStencilTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetStencilTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) );
}
@ -17898,7 +18097,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOpEXT &&
"Function <vkCmdSetStencilOpEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetStencilOpEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetStencilOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkStencilFaceFlags>( faceMask ),
@ -18717,7 +18916,8 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions ) const
VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetVertexInputEXT && "Function <vkCmdSetVertexInputEXT> requires <VK_EXT_vertex_input_dynamic_state>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetVertexInputEXT &&
"Function <vkCmdSetVertexInputEXT> requires <VK_EXT_shader_object> or <VK_EXT_vertex_input_dynamic_state>" );
getDispatcher()->vkCmdSetVertexInputEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
vertexBindingDescriptions.size(),
@ -18912,15 +19112,16 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPatchControlPointsEXT &&
"Function <vkCmdSetPatchControlPointsEXT> requires <VK_EXT_extended_dynamic_state2>" );
"Function <vkCmdSetPatchControlPointsEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetPatchControlPointsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), patchControlPoints );
}
VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT &&
"Function <vkCmdSetRasterizerDiscardEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_VERSION_1_3>" );
VULKAN_HPP_ASSERT(
getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT &&
"Function <vkCmdSetRasterizerDiscardEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) );
}
@ -18928,22 +19129,24 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBiasEnableEXT &&
"Function <vkCmdSetDepthBiasEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthBiasEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthBiasEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEXT && "Function <vkCmdSetLogicOpEXT> requires <VK_EXT_extended_dynamic_state2>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEXT &&
"Function <vkCmdSetLogicOpEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetLogicOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkLogicOp>( logicOp ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT &&
"Function <vkCmdSetPrimitiveRestartEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_VERSION_1_3>" );
VULKAN_HPP_ASSERT(
getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT &&
"Function <vkCmdSetPrimitiveRestartEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) );
}
@ -19431,7 +19634,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetTessellationDomainOriginEXT &&
"Function <vkCmdSetTessellationDomainOriginEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetTessellationDomainOriginEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetTessellationDomainOriginEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkTessellationDomainOrigin>( domainOrigin ) );
@ -19439,14 +19642,16 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClampEnableEXT && "Function <vkCmdSetDepthClampEnableEXT> requires <VK_EXT_extended_dynamic_state3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClampEnableEXT &&
"Function <vkCmdSetDepthClampEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetDepthClampEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClampEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPolygonModeEXT && "Function <vkCmdSetPolygonModeEXT> requires <VK_EXT_extended_dynamic_state3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPolygonModeEXT &&
"Function <vkCmdSetPolygonModeEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetPolygonModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPolygonMode>( polygonMode ) );
}
@ -19454,7 +19659,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizationSamplesEXT &&
"Function <vkCmdSetRasterizationSamplesEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetRasterizationSamplesEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetRasterizationSamplesEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkSampleCountFlagBits>( rasterizationSamples ) );
@ -19463,7 +19668,8 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleMaskEXT && "Function <vkCmdSetSampleMaskEXT> requires <VK_EXT_extended_dynamic_state3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleMaskEXT &&
"Function <vkCmdSetSampleMaskEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
if ( sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32 )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING
@ -19478,21 +19684,23 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToCoverageEnableEXT &&
"Function <vkCmdSetAlphaToCoverageEnableEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetAlphaToCoverageEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetAlphaToCoverageEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToCoverageEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToOneEnableEXT && "Function <vkCmdSetAlphaToOneEnableEXT> requires <VK_EXT_extended_dynamic_state3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToOneEnableEXT &&
"Function <vkCmdSetAlphaToOneEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetAlphaToOneEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToOneEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEnableEXT && "Function <vkCmdSetLogicOpEnableEXT> requires <VK_EXT_extended_dynamic_state3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEnableEXT &&
"Function <vkCmdSetLogicOpEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetLogicOpEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( logicOpEnable ) );
}
@ -19500,7 +19708,8 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT(
uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEnableEXT && "Function <vkCmdSetColorBlendEnableEXT> requires <VK_EXT_extended_dynamic_state3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEnableEXT &&
"Function <vkCmdSetColorBlendEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetColorBlendEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstAttachment,
@ -19513,7 +19722,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEquationEXT &&
"Function <vkCmdSetColorBlendEquationEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetColorBlendEquationEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetColorBlendEquationEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstAttachment,
@ -19525,7 +19734,8 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t firstAttachment,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorWriteMaskEXT && "Function <vkCmdSetColorWriteMaskEXT> requires <VK_EXT_extended_dynamic_state3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorWriteMaskEXT &&
"Function <vkCmdSetColorWriteMaskEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetColorWriteMaskEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstAttachment,
@ -19536,7 +19746,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizationStreamEXT &&
"Function <vkCmdSetRasterizationStreamEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetRasterizationStreamEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetRasterizationStreamEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), rasterizationStream );
}
@ -19545,7 +19755,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetConservativeRasterizationModeEXT &&
"Function <vkCmdSetConservativeRasterizationModeEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetConservativeRasterizationModeEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetConservativeRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) );
@ -19554,14 +19764,15 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExtraPrimitiveOverestimationSizeEXT &&
"Function <vkCmdSetExtraPrimitiveOverestimationSizeEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetExtraPrimitiveOverestimationSizeEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetExtraPrimitiveOverestimationSizeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), extraPrimitiveOverestimationSize );
}
VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipEnableEXT && "Function <vkCmdSetDepthClipEnableEXT> requires <VK_EXT_extended_dynamic_state3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipEnableEXT &&
"Function <vkCmdSetDepthClipEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetDepthClipEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClipEnable ) );
}
@ -19569,7 +19780,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleLocationsEnableEXT &&
"Function <vkCmdSetSampleLocationsEnableEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetSampleLocationsEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetSampleLocationsEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( sampleLocationsEnable ) );
}
@ -19579,7 +19790,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendAdvancedEXT &&
"Function <vkCmdSetColorBlendAdvancedEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetColorBlendAdvancedEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetColorBlendAdvancedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstAttachment,
@ -19591,7 +19802,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetProvokingVertexModeEXT &&
"Function <vkCmdSetProvokingVertexModeEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetProvokingVertexModeEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetProvokingVertexModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) );
@ -19601,7 +19812,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineRasterizationModeEXT &&
"Function <vkCmdSetLineRasterizationModeEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetLineRasterizationModeEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetLineRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) );
@ -19609,7 +19820,8 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEnableEXT && "Function <vkCmdSetLineStippleEnableEXT> requires <VK_EXT_extended_dynamic_state3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEnableEXT &&
"Function <vkCmdSetLineStippleEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetLineStippleEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stippledLineEnable ) );
}
@ -19617,7 +19829,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipNegativeOneToOneEXT &&
"Function <vkCmdSetDepthClipNegativeOneToOneEXT> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetDepthClipNegativeOneToOneEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetDepthClipNegativeOneToOneEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( negativeOneToOne ) );
}
@ -19625,7 +19837,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWScalingEnableNV &&
"Function <vkCmdSetViewportWScalingEnableNV> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetViewportWScalingEnableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetViewportWScalingEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( viewportWScalingEnable ) );
}
@ -19634,7 +19846,8 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t firstViewport,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportSwizzleNV && "Function <vkCmdSetViewportSwizzleNV> requires <VK_EXT_extended_dynamic_state3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportSwizzleNV &&
"Function <vkCmdSetViewportSwizzleNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetViewportSwizzleNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstViewport,
@ -19645,7 +19858,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageToColorEnableNV &&
"Function <vkCmdSetCoverageToColorEnableNV> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetCoverageToColorEnableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetCoverageToColorEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( coverageToColorEnable ) );
}
@ -19653,7 +19866,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageToColorLocationNV &&
"Function <vkCmdSetCoverageToColorLocationNV> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetCoverageToColorLocationNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetCoverageToColorLocationNV( static_cast<VkCommandBuffer>( m_commandBuffer ), coverageToColorLocation );
}
@ -19662,7 +19875,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationModeNV &&
"Function <vkCmdSetCoverageModulationModeNV> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetCoverageModulationModeNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetCoverageModulationModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) );
@ -19672,7 +19885,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationTableEnableNV &&
"Function <vkCmdSetCoverageModulationTableEnableNV> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetCoverageModulationTableEnableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetCoverageModulationTableEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBool32>( coverageModulationTableEnable ) );
@ -19682,7 +19895,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationTableNV &&
"Function <vkCmdSetCoverageModulationTableNV> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetCoverageModulationTableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetCoverageModulationTableNV(
static_cast<VkCommandBuffer>( m_commandBuffer ), coverageModulationTable.size(), coverageModulationTable.data() );
@ -19691,7 +19904,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetShadingRateImageEnableNV &&
"Function <vkCmdSetShadingRateImageEnableNV> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetShadingRateImageEnableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetShadingRateImageEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( shadingRateImageEnable ) );
}
@ -19700,7 +19913,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRepresentativeFragmentTestEnableNV &&
"Function <vkCmdSetRepresentativeFragmentTestEnableNV> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetRepresentativeFragmentTestEnableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetRepresentativeFragmentTestEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkBool32>( representativeFragmentTestEnable ) );
@ -19710,7 +19923,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageReductionModeNV &&
"Function <vkCmdSetCoverageReductionModeNV> requires <VK_EXT_extended_dynamic_state3>" );
"Function <vkCmdSetCoverageReductionModeNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetCoverageReductionModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) );
@ -19812,6 +20025,63 @@ namespace VULKAN_HPP_NAMESPACE
reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) );
}
//=== VK_EXT_shader_object ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::ShaderEXT>
Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::ShaderEXTs( *this, createInfos, allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ShaderEXT
Device::createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::ShaderEXT( *this, createInfo, allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<uint8_t> ShaderEXT::getBinaryData() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderBinaryDataEXT && "Function <vkGetShaderBinaryDataEXT> requires <VK_EXT_shader_object>" );
std::vector<uint8_t> data;
size_t dataSize;
VkResult result;
do
{
result = getDispatcher()->vkGetShaderBinaryDataEXT( static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( m_shader ), &dataSize, nullptr );
if ( ( result == VK_SUCCESS ) && dataSize )
{
data.resize( dataSize );
result = getDispatcher()->vkGetShaderBinaryDataEXT(
static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( m_shader ), &dataSize, reinterpret_cast<void *>( data.data() ) );
}
} while ( result == VK_INCOMPLETE );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::ShaderEXT::getBinaryData" );
VULKAN_HPP_ASSERT( dataSize <= data.size() );
if ( dataSize < data.size() )
{
data.resize( dataSize );
}
return data;
}
VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindShadersEXT && "Function <vkCmdBindShadersEXT> requires <VK_EXT_shader_object>" );
if ( stages.size() != shaders.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindShadersEXT: stages.size() != shaders.size()" );
}
getDispatcher()->vkCmdBindShadersEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
stages.size(),
reinterpret_cast<const VkShaderStageFlagBits *>( stages.data() ),
reinterpret_cast<const VkShaderEXT *>( shaders.data() ) );
}
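// --- Illustrative sketch, not part of the generated header ---------------------------------
// How the VK_EXT_shader_object entry points above might be used together. It assumes the
// default vk / vk::raii namespaces and valid RAII handles obtained elsewhere (for instance
// from Device::createShadersEXT above); the function names are hypothetical.
#include <vulkan/vulkan_raii.hpp>
void bindShaderObjects( vk::raii::CommandBuffer const & commandBuffer,
                        vk::raii::ShaderEXT const &     vertexShader,
                        vk::raii::ShaderEXT const &     fragmentShader )
{
  // One shader object per stage; bindShadersEXT above throws a LogicError if the two
  // arrays differ in size.
  std::array<vk::ShaderStageFlagBits, 2> stages  = { vk::ShaderStageFlagBits::eVertex, vk::ShaderStageFlagBits::eFragment };
  std::array<vk::ShaderEXT, 2>           shaders = { *vertexShader, *fragmentShader };
  commandBuffer.bindShadersEXT( stages, shaders );
  // With shader objects bound, state otherwise baked into a pipeline is supplied through the
  // dynamic-state commands whose assertions now also list <VK_EXT_shader_object>, for example:
  commandBuffer.setRasterizerDiscardEnable( VK_FALSE );
}
// Retrieving the opaque binary for caching; getBinaryData above already handles the size
// query and the VK_INCOMPLETE retry loop.
std::vector<uint8_t> cacheShaderBinary( vk::raii::ShaderEXT const & shader )
{
  return shader.getBinaryData();
}
// --- end of illustrative sketch -------------------------------------------------------------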
//=== VK_QCOM_tile_properties ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM> Framebuffer::getTilePropertiesQCOM() const


@ -5780,6 +5780,22 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Physical
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT>::value,
"PhysicalDeviceImage2DViewOf3DFeaturesEXT is not nothrow_move_constructible!" );
//=== VK_EXT_shader_tile_image ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderTileImageFeaturesEXT ),
"struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT>::value,
"struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT>::value,
"PhysicalDeviceShaderTileImageFeaturesEXT is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT ) == sizeof( VkPhysicalDeviceShaderTileImagePropertiesEXT ),
"struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT>::value,
"struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT>::value,
"PhysicalDeviceShaderTileImagePropertiesEXT is not nothrow_move_constructible!" );
//=== VK_EXT_opacity_micromap ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT ) == sizeof( VkMicromapBuildInfoEXT ), "struct and wrapper have different size!" );
@ -6321,6 +6337,30 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Physical
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT>::value,
"PhysicalDevicePipelineProtectedAccessFeaturesEXT is not nothrow_move_constructible!" );
//=== VK_EXT_shader_object ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderEXT ) == sizeof( VkShaderEXT ), "handle and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderEXT>::value, "ShaderEXT is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderObjectFeaturesEXT ),
"struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT>::value,
"struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT>::value,
"PhysicalDeviceShaderObjectFeaturesEXT is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT ) == sizeof( VkPhysicalDeviceShaderObjectPropertiesEXT ),
"struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT>::value,
"struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT>::value,
"PhysicalDeviceShaderObjectPropertiesEXT is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT ) == sizeof( VkShaderCreateInfoEXT ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT>::value,
"ShaderCreateInfoEXT is not nothrow_move_constructible!" );
//=== VK_QCOM_tile_properties ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM ) == sizeof( VkPhysicalDeviceTilePropertiesFeaturesQCOM ),

@ -73700,6 +73700,190 @@ namespace VULKAN_HPP_NAMESPACE
using Type = PhysicalDeviceShaderModuleIdentifierPropertiesEXT;
};
struct PhysicalDeviceShaderObjectFeaturesEXT
{
using NativeType = VkPhysicalDeviceShaderObjectFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderObjectFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderObjectFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderObject_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: pNext( pNext_ )
, shaderObject( shaderObject_ )
{
}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderObjectFeaturesEXT( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderObjectFeaturesEXT( VkPhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderObjectFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderObjectFeaturesEXT const *>( &rhs ) )
{
}
PhysicalDeviceShaderObjectFeaturesEXT & operator=( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceShaderObjectFeaturesEXT & operator=( VkPhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectFeaturesEXT & setShaderObject( VULKAN_HPP_NAMESPACE::Bool32 shaderObject_ ) VULKAN_HPP_NOEXCEPT
{
shaderObject = shaderObject_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
operator VkPhysicalDeviceShaderObjectFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderObjectFeaturesEXT *>( this );
}
operator VkPhysicalDeviceShaderObjectFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderObjectFeaturesEXT *>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderObject );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceShaderObjectFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderObject == rhs.shaderObject );
# endif
}
bool operator!=( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderObjectFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderObject = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderObjectFeaturesEXT>
{
using Type = PhysicalDeviceShaderObjectFeaturesEXT;
};
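  // --- Illustrative sketch, not part of the generated header -------------------------------
  // Querying the feature struct defined above through the pNext chain of getFeatures2 is the
  // usual way to detect shaderObject support before creating shader objects. The sketch
  // assumes the default vk namespace, a valid physical device, Vulkan 1.1+ and the default
  // dispatcher; the function name is hypothetical.
  //   #include <vulkan/vulkan.hpp>
  //   bool supportsShaderObject( vk::PhysicalDevice physicalDevice )
  //   {
  //     auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceShaderObjectFeaturesEXT>();
  //     return chain.get<vk::PhysicalDeviceShaderObjectFeaturesEXT>().shaderObject == VK_TRUE;
  //   }
  // --- end of illustrative sketch -----------------------------------------------------------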
struct PhysicalDeviceShaderObjectPropertiesEXT
{
using NativeType = VkPhysicalDeviceShaderObjectPropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderObjectPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectPropertiesEXT( std::array<uint8_t, VK_UUID_SIZE> const & shaderBinaryUUID_ = {},
uint32_t shaderBinaryVersion_ = {},
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: pNext( pNext_ )
, shaderBinaryUUID( shaderBinaryUUID_ )
, shaderBinaryVersion( shaderBinaryVersion_ )
{
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectPropertiesEXT( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderObjectPropertiesEXT( VkPhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderObjectPropertiesEXT( *reinterpret_cast<PhysicalDeviceShaderObjectPropertiesEXT const *>( &rhs ) )
{
}
PhysicalDeviceShaderObjectPropertiesEXT & operator=( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceShaderObjectPropertiesEXT & operator=( VkPhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceShaderObjectPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderObjectPropertiesEXT *>( this );
}
operator VkPhysicalDeviceShaderObjectPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderObjectPropertiesEXT *>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::
tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, uint32_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderBinaryUUID, shaderBinaryVersion );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceShaderObjectPropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBinaryUUID == rhs.shaderBinaryUUID ) &&
( shaderBinaryVersion == rhs.shaderBinaryVersion );
# endif
}
bool operator!=( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderObjectPropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> shaderBinaryUUID = {};
uint32_t shaderBinaryVersion = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderObjectPropertiesEXT>
{
using Type = PhysicalDeviceShaderObjectPropertiesEXT;
};
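  // --- Illustrative sketch, not part of the generated header -------------------------------
  // The shaderBinaryUUID reported above identifies the compatibility domain of binaries
  // returned by ShaderEXT::getBinaryData. The sketch compares it against the UUID stored
  // alongside a previously cached binary; physicalDevice, cachedUuid and the function name
  // are assumptions made for illustration only.
  //   #include <vulkan/vulkan.hpp>
  //   #include <algorithm>
  //   bool cachedShaderBinaryUsable( vk::PhysicalDevice physicalDevice, std::array<uint8_t, VK_UUID_SIZE> const & cachedUuid )
  //   {
  //     auto         chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceShaderObjectPropertiesEXT>();
  //     auto const & props = chain.get<vk::PhysicalDeviceShaderObjectPropertiesEXT>();
  //     return std::equal( cachedUuid.begin(), cachedUuid.end(), props.shaderBinaryUUID.begin() );
  //   }
  // --- end of illustrative sketch -----------------------------------------------------------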
struct PhysicalDeviceShaderSMBuiltinsFeaturesNV
{
using NativeType = VkPhysicalDeviceShaderSMBuiltinsFeaturesNV;
@ -74187,6 +74371,226 @@ namespace VULKAN_HPP_NAMESPACE
using PhysicalDeviceShaderTerminateInvocationFeaturesKHR = PhysicalDeviceShaderTerminateInvocationFeatures;
struct PhysicalDeviceShaderTileImageFeaturesEXT
{
using NativeType = VkPhysicalDeviceShaderTileImageFeaturesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImageFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageColorReadAccess_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageDepthReadAccess_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageStencilReadAccess_ = {},
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: pNext( pNext_ )
, shaderTileImageColorReadAccess( shaderTileImageColorReadAccess_ )
, shaderTileImageDepthReadAccess( shaderTileImageDepthReadAccess_ )
, shaderTileImageStencilReadAccess( shaderTileImageStencilReadAccess_ )
{
}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImageFeaturesEXT( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderTileImageFeaturesEXT( VkPhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderTileImageFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderTileImageFeaturesEXT const *>( &rhs ) )
{
}
PhysicalDeviceShaderTileImageFeaturesEXT & operator=( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceShaderTileImageFeaturesEXT & operator=( VkPhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT &
setShaderTileImageColorReadAccess( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageColorReadAccess_ ) VULKAN_HPP_NOEXCEPT
{
shaderTileImageColorReadAccess = shaderTileImageColorReadAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT &
setShaderTileImageDepthReadAccess( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageDepthReadAccess_ ) VULKAN_HPP_NOEXCEPT
{
shaderTileImageDepthReadAccess = shaderTileImageDepthReadAccess_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT &
setShaderTileImageStencilReadAccess( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageStencilReadAccess_ ) VULKAN_HPP_NOEXCEPT
{
shaderTileImageStencilReadAccess = shaderTileImageStencilReadAccess_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
operator VkPhysicalDeviceShaderTileImageFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderTileImageFeaturesEXT *>( this );
}
operator VkPhysicalDeviceShaderTileImageFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderTileImageFeaturesEXT *>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
void * const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, shaderTileImageColorReadAccess, shaderTileImageDepthReadAccess, shaderTileImageStencilReadAccess );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceShaderTileImageFeaturesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderTileImageColorReadAccess == rhs.shaderTileImageColorReadAccess ) &&
( shaderTileImageDepthReadAccess == rhs.shaderTileImageDepthReadAccess ) &&
( shaderTileImageStencilReadAccess == rhs.shaderTileImageStencilReadAccess );
# endif
}
bool operator!=( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageColorReadAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageDepthReadAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageStencilReadAccess = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT>
{
using Type = PhysicalDeviceShaderTileImageFeaturesEXT;
};
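// --- Illustrative usage sketch (application-side code, not part of the generated header) ---
// A minimal example of reading the new VK_EXT_shader_tile_image feature struct through a
// StructureChain. `physicalDevice` is an assumed, already-created vk::PhysicalDevice on a
// Vulkan 1.1+ instance, and the usual StructExtends specialization against
// PhysicalDeviceFeatures2 is assumed to be generated for this struct.
inline bool supportsTileImageColorReadExample( VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice )
{
  auto chain = physicalDevice.getFeatures2<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
                                           VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT>();
  // the chained feature struct is filled by the implementation via the pNext chain
  return chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT>().shaderTileImageColorReadAccess == VK_TRUE;
}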
struct PhysicalDeviceShaderTileImagePropertiesEXT
{
using NativeType = VkPhysicalDeviceShaderTileImagePropertiesEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImagePropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageCoherentReadAccelerated_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadSampleFromPixelRateInvocation_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadFromHelperInvocation_ = {},
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: pNext( pNext_ )
, shaderTileImageCoherentReadAccelerated( shaderTileImageCoherentReadAccelerated_ )
, shaderTileImageReadSampleFromPixelRateInvocation( shaderTileImageReadSampleFromPixelRateInvocation_ )
, shaderTileImageReadFromHelperInvocation( shaderTileImageReadFromHelperInvocation_ )
{
}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImagePropertiesEXT( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderTileImagePropertiesEXT( VkPhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderTileImagePropertiesEXT( *reinterpret_cast<PhysicalDeviceShaderTileImagePropertiesEXT const *>( &rhs ) )
{
}
PhysicalDeviceShaderTileImagePropertiesEXT & operator=( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
PhysicalDeviceShaderTileImagePropertiesEXT & operator=( VkPhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceShaderTileImagePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderTileImagePropertiesEXT *>( this );
}
operator VkPhysicalDeviceShaderTileImagePropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderTileImagePropertiesEXT *>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
void * const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &,
VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie(
sType, pNext, shaderTileImageCoherentReadAccelerated, shaderTileImageReadSampleFromPixelRateInvocation, shaderTileImageReadFromHelperInvocation );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( PhysicalDeviceShaderTileImagePropertiesEXT const & ) const = default;
#else
bool operator==( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
# if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
# else
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderTileImageCoherentReadAccelerated == rhs.shaderTileImageCoherentReadAccelerated ) &&
( shaderTileImageReadSampleFromPixelRateInvocation == rhs.shaderTileImageReadSampleFromPixelRateInvocation ) &&
( shaderTileImageReadFromHelperInvocation == rhs.shaderTileImageReadFromHelperInvocation );
# endif
}
bool operator!=( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageCoherentReadAccelerated = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadSampleFromPixelRateInvocation = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadFromHelperInvocation = {};
};
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT>
{
using Type = PhysicalDeviceShaderTileImagePropertiesEXT;
};
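// --- Illustrative usage sketch (application-side code, not part of the generated header) ---
// The properties struct above is filled by the implementation when chained into
// PhysicalDeviceProperties2; `physicalDevice` is an assumed vk::PhysicalDevice. This variant
// shows the plain pNext-pointer approach instead of a StructureChain.
inline VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT
  getShaderTileImagePropertiesExample( VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice )
{
  VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT tileImageProperties;
  VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2                  properties2;
  properties2.pNext = &tileImageProperties;  // sType members are pre-set by the defaulted constructors
  physicalDevice.getProperties2( &properties2 );
  return tileImageProperties;
}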
struct PhysicalDeviceShadingRateImageFeaturesNV
{
using NativeType = VkPhysicalDeviceShadingRateImageFeaturesNV;
@ -83706,6 +84110,7 @@ namespace VULKAN_HPP_NAMESPACE
};
using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
using ShaderRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
struct PipelineTessellationDomainOriginStateCreateInfo
{
@ -94373,6 +94778,321 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t data = {};
};
struct ShaderCreateInfoEXT
{
using NativeType = VkShaderCreateInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ShaderCreateInfoEXT( VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags_ = {},
VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex,
VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage_ = {},
VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType_ = VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT::eBinary,
size_t codeSize_ = {},
const void * pCode_ = {},
const char * pName_ = {},
uint32_t setLayoutCount_ = {},
const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {},
uint32_t pushConstantRangeCount_ = {},
const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {},
const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {},
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
: pNext( pNext_ )
, flags( flags_ )
, stage( stage_ )
, nextStage( nextStage_ )
, codeType( codeType_ )
, codeSize( codeSize_ )
, pCode( pCode_ )
, pName( pName_ )
, setLayoutCount( setLayoutCount_ )
, pSetLayouts( pSetLayouts_ )
, pushConstantRangeCount( pushConstantRangeCount_ )
, pPushConstantRanges( pPushConstantRanges_ )
, pSpecializationInfo( pSpecializationInfo_ )
{
}
VULKAN_HPP_CONSTEXPR ShaderCreateInfoEXT( ShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ShaderCreateInfoEXT( VkShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ShaderCreateInfoEXT( *reinterpret_cast<ShaderCreateInfoEXT const *>( &rhs ) )
{
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
ShaderCreateInfoEXT( VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags_,
VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_,
VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage_,
VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType_,
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & code_,
const char * pName_ = {},
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ = {},
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ = {},
const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {},
const void * pNext_ = nullptr )
: pNext( pNext_ )
, flags( flags_ )
, stage( stage_ )
, nextStage( nextStage_ )
, codeType( codeType_ )
, codeSize( code_.size() * sizeof( T ) )
, pCode( code_.data() )
, pName( pName_ )
, setLayoutCount( static_cast<uint32_t>( setLayouts_.size() ) )
, pSetLayouts( setLayouts_.data() )
, pushConstantRangeCount( static_cast<uint32_t>( pushConstantRanges_.size() ) )
, pPushConstantRanges( pushConstantRanges_.data() )
, pSpecializationInfo( pSpecializationInfo_ )
{
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
ShaderCreateInfoEXT & operator=( ShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ShaderCreateInfoEXT & operator=( VkShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT
{
stage = stage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setNextStage( VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage_ ) VULKAN_HPP_NOEXCEPT
{
nextStage = nextStage_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setCodeType( VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType_ ) VULKAN_HPP_NOEXCEPT
{
codeType = codeType_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT
{
codeSize = codeSize_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPCode( const void * pCode_ ) VULKAN_HPP_NOEXCEPT
{
pCode = pCode_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
template <typename T>
ShaderCreateInfoEXT & setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & code_ ) VULKAN_HPP_NOEXCEPT
{
codeSize = code_.size() * sizeof( T );
pCode = code_.data();
return *this;
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT
{
pName = pName_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT
{
setLayoutCount = setLayoutCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
{
pSetLayouts = pSetLayouts_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ShaderCreateInfoEXT &
setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
{
setLayoutCount = static_cast<uint32_t>( setLayouts_.size() );
pSetLayouts = setLayouts_.data();
return *this;
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT
{
pushConstantRangeCount = pushConstantRangeCount_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT &
setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
{
pPushConstantRanges = pPushConstantRanges_;
return *this;
}
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ShaderCreateInfoEXT & setPushConstantRanges(
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
{
pushConstantRangeCount = static_cast<uint32_t>( pushConstantRanges_.size() );
pPushConstantRanges = pushConstantRanges_.data();
return *this;
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT &
setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
{
pSpecializationInfo = pSpecializationInfo_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
operator VkShaderCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkShaderCreateInfoEXT *>( this );
}
operator VkShaderCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkShaderCreateInfoEXT *>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
const void * const &,
VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT const &,
VULKAN_HPP_NAMESPACE::ShaderStageFlagBits const &,
VULKAN_HPP_NAMESPACE::ShaderStageFlags const &,
VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT const &,
size_t const &,
const void * const &,
const char * const &,
uint32_t const &,
const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * const &,
uint32_t const &,
const VULKAN_HPP_NAMESPACE::PushConstantRange * const &,
const VULKAN_HPP_NAMESPACE::SpecializationInfo * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType,
pNext,
flags,
stage,
nextStage,
codeType,
codeSize,
pCode,
pName,
setLayoutCount,
pSetLayouts,
pushConstantRangeCount,
pPushConstantRanges,
pSpecializationInfo );
}
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
std::strong_ordering operator<=>( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
return cmp;
if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
return cmp;
if ( auto cmp = stage <=> rhs.stage; cmp != 0 )
return cmp;
if ( auto cmp = nextStage <=> rhs.nextStage; cmp != 0 )
return cmp;
if ( auto cmp = codeType <=> rhs.codeType; cmp != 0 )
return cmp;
if ( auto cmp = codeSize <=> rhs.codeSize; cmp != 0 )
return cmp;
if ( auto cmp = pCode <=> rhs.pCode; cmp != 0 )
return cmp;
if ( pName != rhs.pName )
if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = setLayoutCount <=> rhs.setLayoutCount; cmp != 0 )
return cmp;
if ( auto cmp = pSetLayouts <=> rhs.pSetLayouts; cmp != 0 )
return cmp;
if ( auto cmp = pushConstantRangeCount <=> rhs.pushConstantRangeCount; cmp != 0 )
return cmp;
if ( auto cmp = pPushConstantRanges <=> rhs.pPushConstantRanges; cmp != 0 )
return cmp;
if ( auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0 )
return cmp;
return std::strong_ordering::equivalent;
}
#endif
bool operator==( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( nextStage == rhs.nextStage ) &&
( codeType == rhs.codeType ) && ( codeSize == rhs.codeSize ) && ( pCode == rhs.pCode ) &&
( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ) && ( setLayoutCount == rhs.setLayoutCount ) &&
( pSetLayouts == rhs.pSetLayouts ) && ( pushConstantRangeCount == rhs.pushConstantRangeCount ) &&
( pPushConstantRanges == rhs.pPushConstantRanges ) && ( pSpecializationInfo == rhs.pSpecializationInfo );
}
bool operator!=( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderCreateInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex;
VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage = {};
VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType = VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT::eBinary;
size_t codeSize = {};
const void * pCode = {};
const char * pName = {};
uint32_t setLayoutCount = {};
const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {};
uint32_t pushConstantRangeCount = {};
const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {};
const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo = {};
};
template <>
struct CppType<StructureType, StructureType::eShaderCreateInfoEXT>
{
using Type = ShaderCreateInfoEXT;
};
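// --- Illustrative usage sketch (application-side code, not part of the generated header) ---
// Builds a ShaderCreateInfoEXT with the setters above and creates one shader object through the
// C-signature-mirroring wrapper. `device`, `spirv` and `setLayouts` are assumed inputs, "main" is
// an assumed entry-point name, and the dispatcher is assumed to have loaded the
// VK_EXT_shader_object entry points; the exact set of enhanced createShadersEXT overloads may
// differ from this sketch.
inline VULKAN_HPP_NAMESPACE::ShaderEXT
  createVertexShaderObjectExample( VULKAN_HPP_NAMESPACE::Device                                   device,
                                   std::vector<uint32_t> const &                                  spirv,
                                   std::vector<VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts )
{
  VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT createInfo;
  createInfo.setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex )
    .setNextStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eFragment )  // a fragment shader may follow this vertex shader
    .setCodeType( VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT::eSpirv )
    .setCodeSize( spirv.size() * sizeof( uint32_t ) )
    .setPCode( spirv.data() )
    .setPName( "main" )
    .setSetLayouts( setLayouts );  // fills setLayoutCount and pSetLayouts from the proxy

  VULKAN_HPP_NAMESPACE::ShaderEXT shader;
  VULKAN_HPP_NAMESPACE::Result    result = device.createShadersEXT( 1, &createInfo, nullptr, &shader );
  // eErrorIncompatibleShaderBinaryEXT would be reported here when a pre-built binary does not match the implementation
  return ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ? shader : VULKAN_HPP_NAMESPACE::ShaderEXT{};
}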
struct ShaderModuleCreateInfo
{
using NativeType = VkShaderModuleCreateInfo;
View File
@ -3206,6 +3206,32 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
//=== VK_EXT_shader_object ===
VULKAN_HPP_INLINE std::string to_string( ShaderCreateFlagsEXT value )
{
if ( !value )
return "{}";
std::string result;
if ( value & ShaderCreateFlagBitsEXT::eLinkStage )
result += "LinkStage | ";
if ( value & ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize )
result += "AllowVaryingSubgroupSize | ";
if ( value & ShaderCreateFlagBitsEXT::eRequireFullSubgroups )
result += "RequireFullSubgroups | ";
if ( value & ShaderCreateFlagBitsEXT::eNoTaskShader )
result += "NoTaskShader | ";
if ( value & ShaderCreateFlagBitsEXT::eDispatchBase )
result += "DispatchBase | ";
if ( value & ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment )
result += "FragmentShadingRateAttachment | ";
if ( value & ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment )
result += "FragmentDensityMapAttachment | ";
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
}
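// --- Illustrative usage sketch (application-side code, not part of the generated header) ---
// Demonstrates the bitmask-to-string helper above: with two bits set, the composed string is
// trimmed of its trailing " | " and wrapped in braces.
inline std::string shaderCreateFlagsToStringExample()
{
  VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags =
    VULKAN_HPP_NAMESPACE::ShaderCreateFlagBitsEXT::eLinkStage | VULKAN_HPP_NAMESPACE::ShaderCreateFlagBitsEXT::eRequireFullSubgroups;
  return VULKAN_HPP_NAMESPACE::to_string( flags );  // "{ LinkStage | RequireFullSubgroups }"
}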
//=======================
//=== ENUMs to_string ===
//=======================
@ -3277,6 +3303,7 @@ namespace VULKAN_HPP_NAMESPACE
case Result::eErrorInvalidVideoStdParametersKHR: return "ErrorInvalidVideoStdParametersKHR";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
case Result::eErrorCompressionExhaustedEXT: return "ErrorCompressionExhaustedEXT";
case Result::eErrorIncompatibleShaderBinaryEXT: return "ErrorIncompatibleShaderBinaryEXT";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@ -4019,6 +4046,8 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePhysicalDeviceMultiDrawFeaturesEXT: return "PhysicalDeviceMultiDrawFeaturesEXT";
case StructureType::ePhysicalDeviceMultiDrawPropertiesEXT: return "PhysicalDeviceMultiDrawPropertiesEXT";
case StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT: return "PhysicalDeviceImage2DViewOf3DFeaturesEXT";
case StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT: return "PhysicalDeviceShaderTileImageFeaturesEXT";
case StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT: return "PhysicalDeviceShaderTileImagePropertiesEXT";
case StructureType::eMicromapBuildInfoEXT: return "MicromapBuildInfoEXT";
case StructureType::eMicromapVersionInfoEXT: return "MicromapVersionInfoEXT";
case StructureType::eCopyMicromapInfoEXT: return "CopyMicromapInfoEXT";
@ -4081,6 +4110,9 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV: return "OpticalFlowSessionCreatePrivateDataInfoNV";
case StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT: return "PhysicalDeviceLegacyDitheringFeaturesEXT";
case StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT: return "PhysicalDevicePipelineProtectedAccessFeaturesEXT";
case StructureType::ePhysicalDeviceShaderObjectFeaturesEXT: return "PhysicalDeviceShaderObjectFeaturesEXT";
case StructureType::ePhysicalDeviceShaderObjectPropertiesEXT: return "PhysicalDeviceShaderObjectPropertiesEXT";
case StructureType::eShaderCreateInfoEXT: return "ShaderCreateInfoEXT";
case StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM: return "PhysicalDeviceTilePropertiesFeaturesQCOM";
case StructureType::eTilePropertiesQCOM: return "TilePropertiesQCOM";
case StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC: return "PhysicalDeviceAmigoProfilingFeaturesSEC";
@ -4162,6 +4194,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_FUCHSIA*/
case ObjectType::eMicromapEXT: return "MicromapEXT";
case ObjectType::eOpticalFlowSessionNV: return "OpticalFlowSessionNV";
case ObjectType::eShaderEXT: return "ShaderEXT";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
@ -8313,6 +8346,33 @@ namespace VULKAN_HPP_NAMESPACE
}
}
//=== VK_EXT_shader_object ===
VULKAN_HPP_INLINE std::string to_string( ShaderCreateFlagBitsEXT value )
{
switch ( value )
{
case ShaderCreateFlagBitsEXT::eLinkStage: return "LinkStage";
case ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize: return "AllowVaryingSubgroupSize";
case ShaderCreateFlagBitsEXT::eRequireFullSubgroups: return "RequireFullSubgroups";
case ShaderCreateFlagBitsEXT::eNoTaskShader: return "NoTaskShader";
case ShaderCreateFlagBitsEXT::eDispatchBase: return "DispatchBase";
case ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment: return "FragmentShadingRateAttachment";
case ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment: return "FragmentDensityMapAttachment";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
VULKAN_HPP_INLINE std::string to_string( ShaderCodeTypeEXT value )
{
switch ( value )
{
case ShaderCodeTypeEXT::eBinary: return "Binary";
case ShaderCodeTypeEXT::eSpirv: return "Spirv";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
//=== VK_NV_ray_tracing_invocation_reorder ===
VULKAN_HPP_INLINE std::string to_string( RayTracingInvocationReorderModeNV value )
View File
@ -5420,6 +5420,15 @@ namespace VULKAN_HPP_NAMESPACE
};
};
template <>
struct StructExtends<PipelineShaderStageRequiredSubgroupSizeCreateInfo, ShaderCreateInfoEXT>
{
enum
{
value = true
};
};
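// --- Illustrative usage sketch (application-side code, not part of the generated header) ---
// The specialization above is what allows PipelineShaderStageRequiredSubgroupSizeCreateInfo (and
// its new ShaderRequiredSubgroupSizeCreateInfoEXT alias) to be chained into a ShaderCreateInfoEXT
// via StructureChain; the subgroup size of 32 is just an assumed example value.
inline void requiredSubgroupSizeChainExample(
  VULKAN_HPP_NAMESPACE::StructureChain<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT,
                                       VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo> & chain )
{
  chain.get<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT>().setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eCompute );
  chain.get<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo>().requiredSubgroupSize = 32;
  // the StructureChain constructor already linked the two structs through their pNext members,
  // so chain.get<ShaderCreateInfoEXT>() can be handed to the shader-object creation call as-is
}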
template <>
struct StructExtends<PhysicalDeviceInlineUniformBlockFeatures, PhysicalDeviceFeatures2>
{
View File
@ -410,6 +410,7 @@ namespace VULKAN_HPP_NAMESPACE
using PhysicalDeviceSubgroupSizeControlPropertiesEXT = PhysicalDeviceSubgroupSizeControlProperties;
struct PipelineShaderStageRequiredSubgroupSizeCreateInfo;
using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
using ShaderRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
struct PhysicalDeviceInlineUniformBlockFeatures;
using PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures;
struct PhysicalDeviceInlineUniformBlockProperties;
View File
@ -8506,14 +8506,16 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullMode && "Function <vkCmdSetCullMode> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullMode &&
"Function <vkCmdSetCullMode> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetCullMode( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFace && "Function <vkCmdSetFrontFace> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFace &&
"Function <vkCmdSetFrontFace> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetFrontFace( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) );
}
@ -8521,7 +8523,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopology &&
"Function <vkCmdSetPrimitiveTopology> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetPrimitiveTopology> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetPrimitiveTopology( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) );
}
@ -8530,7 +8532,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCount &&
"Function <vkCmdSetViewportWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetViewportWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetViewportWithCount(
static_cast<VkCommandBuffer>( m_commandBuffer ), viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
@ -8540,7 +8542,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCount &&
"Function <vkCmdSetScissorWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetScissorWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetScissorWithCount(
static_cast<VkCommandBuffer>( m_commandBuffer ), scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
@ -8553,7 +8555,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2 &&
"Function <vkCmdBindVertexBuffers2> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdBindVertexBuffers2> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
if ( buffers.size() != offsets.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" );
@ -8579,7 +8581,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnable &&
"Function <vkCmdSetDepthTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) );
}
@ -8587,7 +8589,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnable &&
"Function <vkCmdSetDepthWriteEnable> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthWriteEnable> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthWriteEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) );
}
@ -8595,7 +8597,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOp &&
"Function <vkCmdSetDepthCompareOp> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthCompareOp> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthCompareOp( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) );
}
@ -8603,7 +8605,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnable &&
"Function <vkCmdSetDepthBoundsTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthBoundsTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthBoundsTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) );
}
@ -8611,7 +8613,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnable &&
"Function <vkCmdSetStencilTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetStencilTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetStencilTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) );
}
@ -8622,7 +8624,8 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOp && "Function <vkCmdSetStencilOp> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOp &&
"Function <vkCmdSetStencilOp> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetStencilOp( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkStencilFaceFlags>( faceMask ),
@ -8635,7 +8638,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizerDiscardEnable &&
"Function <vkCmdSetRasterizerDiscardEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_VERSION_1_3>" );
"Function <vkCmdSetRasterizerDiscardEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetRasterizerDiscardEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) );
}
@ -8643,7 +8646,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBiasEnable &&
"Function <vkCmdSetDepthBiasEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthBiasEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthBiasEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) );
}
@ -8651,7 +8654,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveRestartEnable &&
"Function <vkCmdSetPrimitiveRestartEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_VERSION_1_3>" );
"Function <vkCmdSetPrimitiveRestartEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetPrimitiveRestartEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) );
}
@ -9911,7 +9914,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullModeEXT &&
"Function <vkCmdSetCullModeEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetCullModeEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetCullModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) );
}
@ -9919,7 +9922,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFaceEXT &&
"Function <vkCmdSetFrontFaceEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetFrontFaceEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetFrontFaceEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) );
}
@ -9927,7 +9930,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopologyEXT &&
"Function <vkCmdSetPrimitiveTopologyEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetPrimitiveTopologyEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetPrimitiveTopologyEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) );
}
@ -9936,7 +9939,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCountEXT &&
"Function <vkCmdSetViewportWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetViewportWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetViewportWithCountEXT(
static_cast<VkCommandBuffer>( m_commandBuffer ), viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
@ -9946,7 +9949,7 @@ namespace VULKAN_HPP_NAMESPACE
CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCountEXT &&
"Function <vkCmdSetScissorWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetScissorWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetScissorWithCountEXT(
static_cast<VkCommandBuffer>( m_commandBuffer ), scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
@ -9960,7 +9963,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2EXT &&
"Function <vkCmdBindVertexBuffers2EXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdBindVertexBuffers2EXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
if ( buffers.size() != offsets.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
@ -9986,7 +9989,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnableEXT &&
"Function <vkCmdSetDepthTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) );
}
@ -9994,7 +9997,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnableEXT &&
"Function <vkCmdSetDepthWriteEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthWriteEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthWriteEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) );
}
@ -10002,7 +10005,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOpEXT &&
"Function <vkCmdSetDepthCompareOpEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthCompareOpEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthCompareOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) );
}
@ -10010,7 +10013,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT &&
"Function <vkCmdSetDepthBoundsTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthBoundsTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) );
}
@ -10018,7 +10021,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnableEXT &&
"Function <vkCmdSetStencilTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetStencilTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetStencilTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) );
}
@ -10030,7 +10033,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOpEXT &&
"Function <vkCmdSetStencilOpEXT> requires <VK_EXT_extended_dynamic_state> or <VK_VERSION_1_3>" );
"Function <vkCmdSetStencilOpEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetStencilOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkStencilFaceFlags>( faceMask ),
@ -10235,7 +10238,8 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions ) const
VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetVertexInputEXT && "Function <vkCmdSetVertexInputEXT> requires <VK_EXT_vertex_input_dynamic_state>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetVertexInputEXT &&
"Function <vkCmdSetVertexInputEXT> requires <VK_EXT_shader_object> or <VK_EXT_vertex_input_dynamic_state>" );
getDispatcher()->vkCmdSetVertexInputEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
vertexBindingDescriptions.size(),
@ -10379,15 +10383,16 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPatchControlPointsEXT &&
"Function <vkCmdSetPatchControlPointsEXT> requires <VK_EXT_extended_dynamic_state2>" );
"Function <vkCmdSetPatchControlPointsEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetPatchControlPointsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), patchControlPoints );
}
VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT &&
"Function <vkCmdSetRasterizerDiscardEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_VERSION_1_3>" );
VULKAN_HPP_ASSERT(
getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT &&
"Function <vkCmdSetRasterizerDiscardEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) );
}
@ -10395,22 +10400,24 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBiasEnableEXT &&
"Function <vkCmdSetDepthBiasEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_VERSION_1_3>" );
"Function <vkCmdSetDepthBiasEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetDepthBiasEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEXT && "Function <vkCmdSetLogicOpEXT> requires <VK_EXT_extended_dynamic_state2>" );
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEXT &&
"Function <vkCmdSetLogicOpEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object>" );
getDispatcher()->vkCmdSetLogicOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkLogicOp>( logicOp ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT &&
"Function <vkCmdSetPrimitiveRestartEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_VERSION_1_3>" );
VULKAN_HPP_ASSERT(
getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT &&
"Function <vkCmdSetPrimitiveRestartEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) );
}
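// --- Illustrative usage sketch (application-side code, not part of the generated header) ---
// With VK_EXT_shader_object enabled, the dynamic-state commands above are usable even when neither
// VK_EXT_extended_dynamic_state(2) nor Vulkan 1.3 is available, which is why the assertion messages
// now also list <VK_EXT_shader_object>. `commandBuffer` is an assumed vk::raii::CommandBuffer in the
// recording state.
inline void recordShaderObjectDynamicStateExample( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandBuffer const & commandBuffer )
{
  commandBuffer.setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlagBits::eBack );
  commandBuffer.setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise );
  commandBuffer.setDepthTestEnableEXT( VK_TRUE );
  commandBuffer.setPrimitiveRestartEnableEXT( VK_FALSE );
}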
View File
@ -45371,6 +45371,7 @@ namespace VULKAN_HPP_NAMESPACE
};
using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
using ShaderRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
struct PipelineTessellationDomainOriginStateCreateInfo
{