Mirror of https://github.com/KhronosGroup/Vulkan-Hpp.git (synced 2024-10-14 16:32:17 +00:00)

Update Vulkan-Headers to v1.3.260 (#1629)
Co-authored-by: GitHub <noreply@github.com>
commit 68052d9343, parent b988e54dad

@ -1 +1 @@
Subproject commit cb7b123f2ddc04b86fd106c3a2b2e9872e8215b5
Subproject commit 94bb3c998b9156b9101421f7614617dfcf7f4256
@ -800,6 +800,12 @@ export namespace VULKAN_HPP_NAMESPACE
using VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagBitsNV;
using VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV;

//=== VK_KHR_maintenance5 ===
using VULKAN_HPP_NAMESPACE::BufferUsageFlagBits2KHR;
using VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR;
using VULKAN_HPP_NAMESPACE::PipelineCreateFlagBits2KHR;
using VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR;

//=== VK_EXT_shader_object ===
using VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT;
using VULKAN_HPP_NAMESPACE::ShaderCreateFlagBitsEXT;
@ -927,6 +933,11 @@ export namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_external_memory ===
using VULKAN_HPP_NAMESPACE::QueueFamilyExternalKhr;

#if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_AMDX_shader_enqueue ===
using VULKAN_HPP_NAMESPACE::ShaderIndexUnusedAmdx;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

//=== VK_KHR_ray_tracing_pipeline ===
using VULKAN_HPP_NAMESPACE::ShaderUnusedKhr;
@ -1796,6 +1807,18 @@ export namespace VULKAN_HPP_NAMESPACE
|
||||
using VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID;
|
||||
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
|
||||
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
//=== VK_AMDX_shader_enqueue ===
|
||||
using VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX;
|
||||
using VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX;
|
||||
using VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX;
|
||||
using VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX;
|
||||
using VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX;
|
||||
using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX;
|
||||
using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX;
|
||||
using VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX;
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
//=== VK_EXT_sample_locations ===
|
||||
using VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT;
|
||||
using VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT;
|
||||
@ -2114,13 +2137,11 @@ export namespace VULKAN_HPP_NAMESPACE
|
||||
using VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT;
|
||||
using VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT;
|
||||
using VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT;
|
||||
using VULKAN_HPP_NAMESPACE::ImageSubresource2EXT;
|
||||
using VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT;
|
||||
using VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT;
|
||||
using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT;
|
||||
using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT;
|
||||
using VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT;
|
||||
using VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT;
|
||||
|
||||
//=== VK_KHR_map_memory2 ===
|
||||
using VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR;
|
||||
@ -2565,6 +2586,18 @@ export namespace VULKAN_HPP_NAMESPACE
|
||||
//=== VK_EXT_pipeline_protected_access ===
|
||||
using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT;
|
||||
|
||||
//=== VK_KHR_maintenance5 ===
|
||||
using VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR;
|
||||
using VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR;
|
||||
using VULKAN_HPP_NAMESPACE::ImageSubresource2EXT;
|
||||
using VULKAN_HPP_NAMESPACE::ImageSubresource2KHR;
|
||||
using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR;
|
||||
using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR;
|
||||
using VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR;
|
||||
using VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR;
|
||||
using VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT;
|
||||
using VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR;
|
||||
|
||||
//=== VK_KHR_ray_tracing_position_fetch ===
|
||||
using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR;
|
||||
|
||||
|
@ -114,7 +114,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h
# include <span>
#endif

static_assert( VK_HEADER_VERSION == 259, "Wrong VK_HEADER_VERSION!" );
static_assert( VK_HEADER_VERSION == 260, "Wrong VK_HEADER_VERSION!" );

// 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default.
// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
@ -3997,6 +3997,59 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
}
|
||||
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
|
||||
|
||||
# if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
//=== VK_AMDX_shader_enqueue ===
|
||||
|
||||
VkResult vkCreateExecutionGraphPipelinesAMDX( VkDevice device,
|
||||
VkPipelineCache pipelineCache,
|
||||
uint32_t createInfoCount,
|
||||
const VkExecutionGraphPipelineCreateInfoAMDX * pCreateInfos,
|
||||
const VkAllocationCallbacks * pAllocator,
|
||||
VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
return ::vkCreateExecutionGraphPipelinesAMDX( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
|
||||
}
|
||||
|
||||
VkResult vkGetExecutionGraphPipelineScratchSizeAMDX( VkDevice device,
|
||||
VkPipeline executionGraph,
|
||||
VkExecutionGraphPipelineScratchSizeAMDX * pSizeInfo ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
return ::vkGetExecutionGraphPipelineScratchSizeAMDX( device, executionGraph, pSizeInfo );
|
||||
}
|
||||
|
||||
VkResult vkGetExecutionGraphPipelineNodeIndexAMDX( VkDevice device,
|
||||
VkPipeline executionGraph,
|
||||
const VkPipelineShaderStageNodeCreateInfoAMDX * pNodeInfo,
|
||||
uint32_t * pNodeIndex ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
return ::vkGetExecutionGraphPipelineNodeIndexAMDX( device, executionGraph, pNodeInfo, pNodeIndex );
|
||||
}
|
||||
|
||||
void vkCmdInitializeGraphScratchMemoryAMDX( VkCommandBuffer commandBuffer, VkDeviceAddress scratch ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
return ::vkCmdInitializeGraphScratchMemoryAMDX( commandBuffer, scratch );
|
||||
}
|
||||
|
||||
void vkCmdDispatchGraphAMDX( VkCommandBuffer commandBuffer,
|
||||
VkDeviceAddress scratch,
|
||||
const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
return ::vkCmdDispatchGraphAMDX( commandBuffer, scratch, pCountInfo );
|
||||
}
|
||||
|
||||
void vkCmdDispatchGraphIndirectAMDX( VkCommandBuffer commandBuffer,
|
||||
VkDeviceAddress scratch,
|
||||
const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
return ::vkCmdDispatchGraphIndirectAMDX( commandBuffer, scratch, pCountInfo );
|
||||
}
|
||||
|
||||
void vkCmdDispatchGraphIndirectCountAMDX( VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
return ::vkCmdDispatchGraphIndirectCountAMDX( commandBuffer, scratch, countInfo );
|
||||
}
|
||||
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
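The members added above are thin wrappers in the static dispatcher that simply forward to the global C entry points. A minimal sketch of what that forwarding means in practice, using a core command so it compiles without the beta extension; the command buffer is assumed to be valid and in the recording state:

#include <vulkan/vulkan.hpp>

void dispatchExample( VkCommandBuffer cmd )
{
  vk::DispatchLoaderStatic d;
  // Equivalent to calling ::vkCmdDispatch( cmd, 8, 8, 1 ) directly: the
  // dispatcher member is just a forwarding wrapper like the ones in this hunk.
  d.vkCmdDispatch( cmd, 8, 8, 1 );
}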
|
||||
//=== VK_EXT_sample_locations ===
|
||||
|
||||
void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT
|
||||
@ -4943,8 +4996,8 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
|
||||
void vkGetImageSubresourceLayout2EXT( VkDevice device,
|
||||
VkImage image,
|
||||
const VkImageSubresource2EXT * pSubresource,
|
||||
VkSubresourceLayout2EXT * pLayout ) const VULKAN_HPP_NOEXCEPT
|
||||
const VkImageSubresource2KHR * pSubresource,
|
||||
VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
return ::vkGetImageSubresourceLayout2EXT( device, image, pSubresource, pLayout );
|
||||
}
|
||||
@ -5970,6 +6023,36 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
return ::vkCmdOpticalFlowExecuteNV( commandBuffer, session, pExecuteInfo );
|
||||
}
|
||||
|
||||
//=== VK_KHR_maintenance5 ===
|
||||
|
||||
void vkCmdBindIndexBuffer2KHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType ) const
|
||||
VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
return ::vkCmdBindIndexBuffer2KHR( commandBuffer, buffer, offset, size, indexType );
|
||||
}
|
||||
|
||||
void vkGetRenderingAreaGranularityKHR( VkDevice device,
|
||||
const VkRenderingAreaInfoKHR * pRenderingAreaInfo,
|
||||
VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
return ::vkGetRenderingAreaGranularityKHR( device, pRenderingAreaInfo, pGranularity );
|
||||
}
|
||||
|
||||
void vkGetDeviceImageSubresourceLayoutKHR( VkDevice device,
|
||||
const VkDeviceImageSubresourceInfoKHR * pInfo,
|
||||
VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
return ::vkGetDeviceImageSubresourceLayoutKHR( device, pInfo, pLayout );
|
||||
}
|
||||
|
||||
void vkGetImageSubresourceLayout2KHR( VkDevice device,
|
||||
VkImage image,
|
||||
const VkImageSubresource2KHR * pSubresource,
|
||||
VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
return ::vkGetImageSubresourceLayout2KHR( device, image, pSubresource, pLayout );
|
||||
}
|
||||
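Outside of the dispatcher, the same VK_KHR_maintenance5 entry points can be fetched with vkGetDeviceProcAddr. A minimal sketch using the C API; `device`, `cmd`, `buffer` and `bufferSize` are assumed to exist and the extension is assumed to be enabled on the device:

#include <vulkan/vulkan.h>

void bindWholeBufferAsIndices( VkDevice device, VkCommandBuffer cmd, VkBuffer buffer, VkDeviceSize bufferSize )
{
  auto pfnBindIndexBuffer2 =
    reinterpret_cast<PFN_vkCmdBindIndexBuffer2KHR>( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) );
  if ( pfnBindIndexBuffer2 )
  {
    // The new `size` parameter lets the implementation bound-check index fetches.
    pfnBindIndexBuffer2( cmd, buffer, 0, bufferSize, VK_INDEX_TYPE_UINT32 );
  }
}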
|
||||
//=== VK_EXT_shader_object ===
|
||||
|
||||
VkResult vkCreateShadersEXT( VkDevice device,
|
||||
@ -7015,6 +7098,11 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
//=== VK_KHR_external_memory ===
|
||||
VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyExternalKhr = VK_QUEUE_FAMILY_EXTERNAL_KHR;
|
||||
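QueueFamilyExternalKhr mirrors the C constant VK_QUEUE_FAMILY_EXTERNAL_KHR, which is used as a queue family index in ownership-transfer barriers for external memory. A hedged sketch in the C API; the buffer, its size and the destination queue family are assumptions:

#include <vulkan/vulkan.h>

// Acquire a buffer last written outside of Vulkan into the given queue family.
VkBufferMemoryBarrier makeExternalAcquireBarrier( VkBuffer buffer, VkDeviceSize size, uint32_t dstQueueFamily )
{
  VkBufferMemoryBarrier barrier = {};
  barrier.sType               = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
  barrier.srcAccessMask       = 0;
  barrier.dstAccessMask       = VK_ACCESS_SHADER_READ_BIT;
  barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_EXTERNAL_KHR;  // same value as QueueFamilyExternalKhr
  barrier.dstQueueFamilyIndex = dstQueueFamily;
  barrier.buffer              = buffer;
  barrier.offset              = 0;
  barrier.size                = size;
  return barrier;
}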
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
//=== VK_AMDX_shader_enqueue ===
|
||||
VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderIndexUnusedAmdx = VK_SHADER_INDEX_UNUSED_AMDX;
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
//=== VK_KHR_ray_tracing_pipeline ===
|
||||
VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderUnusedKhr = VK_SHADER_UNUSED_KHR;
|
||||
|
||||
@ -8150,7 +8238,16 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
value = true
|
||||
};
|
||||
};
|
||||
|
||||
# if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
template <>
|
||||
struct StructExtends<PipelineCreationFeedbackCreateInfo, ExecutionGraphPipelineCreateInfoAMDX>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
template <>
|
||||
struct StructExtends<PhysicalDeviceShaderTerminateInvocationFeatures, PhysicalDeviceFeatures2>
|
||||
{
|
||||
@ -9721,6 +9818,45 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
};
|
||||
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
|
||||
|
||||
# if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
//=== VK_AMDX_shader_enqueue ===
|
||||
template <>
|
||||
struct StructExtends<PhysicalDeviceShaderEnqueueFeaturesAMDX, PhysicalDeviceFeatures2>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
|
||||
template <>
|
||||
struct StructExtends<PhysicalDeviceShaderEnqueueFeaturesAMDX, DeviceCreateInfo>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
|
||||
template <>
|
||||
struct StructExtends<PhysicalDeviceShaderEnqueuePropertiesAMDX, PhysicalDeviceProperties2>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
|
||||
template <>
|
||||
struct StructExtends<PipelineShaderStageNodeCreateInfoAMDX, PipelineShaderStageCreateInfo>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
//=== VK_EXT_sample_locations ===
|
||||
template <>
|
||||
struct StructExtends<SampleLocationsInfoEXT, ImageMemoryBarrier>
|
||||
@ -10199,6 +10335,16 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
value = true
|
||||
};
|
||||
};
|
||||
# if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
template <>
|
||||
struct StructExtends<PipelineCompilerControlCreateInfoAMD, ExecutionGraphPipelineCreateInfoAMDX>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
//=== VK_AMD_shader_core_properties ===
|
||||
template <>
|
||||
@ -11117,7 +11263,7 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
};
|
||||
|
||||
template <>
|
||||
struct StructExtends<SubresourceHostMemcpySizeEXT, SubresourceLayout2EXT>
|
||||
struct StructExtends<SubresourceHostMemcpySizeEXT, SubresourceLayout2KHR>
|
||||
{
|
||||
enum
|
||||
{
|
||||
@ -12287,7 +12433,7 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
};
|
||||
|
||||
template <>
|
||||
struct StructExtends<ImageCompressionPropertiesEXT, SubresourceLayout2EXT>
|
||||
struct StructExtends<ImageCompressionPropertiesEXT, SubresourceLayout2KHR>
|
||||
{
|
||||
enum
|
||||
{
|
||||
@ -13557,6 +13703,106 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
};
|
||||
};
|
||||
|
||||
//=== VK_KHR_maintenance5 ===
|
||||
template <>
|
||||
struct StructExtends<PhysicalDeviceMaintenance5FeaturesKHR, PhysicalDeviceFeatures2>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
|
||||
template <>
|
||||
struct StructExtends<PhysicalDeviceMaintenance5FeaturesKHR, DeviceCreateInfo>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
|
||||
template <>
|
||||
struct StructExtends<PhysicalDeviceMaintenance5PropertiesKHR, PhysicalDeviceProperties2>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
|
||||
template <>
|
||||
struct StructExtends<PipelineCreateFlags2CreateInfoKHR, ComputePipelineCreateInfo>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
|
||||
template <>
|
||||
struct StructExtends<PipelineCreateFlags2CreateInfoKHR, GraphicsPipelineCreateInfo>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
|
||||
template <>
|
||||
struct StructExtends<PipelineCreateFlags2CreateInfoKHR, RayTracingPipelineCreateInfoNV>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
|
||||
template <>
|
||||
struct StructExtends<PipelineCreateFlags2CreateInfoKHR, RayTracingPipelineCreateInfoKHR>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
|
||||
template <>
|
||||
struct StructExtends<BufferUsageFlags2CreateInfoKHR, BufferViewCreateInfo>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
|
||||
template <>
|
||||
struct StructExtends<BufferUsageFlags2CreateInfoKHR, BufferCreateInfo>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
|
||||
template <>
|
||||
struct StructExtends<BufferUsageFlags2CreateInfoKHR, PhysicalDeviceExternalBufferInfo>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
|
||||
template <>
|
||||
struct StructExtends<BufferUsageFlags2CreateInfoKHR, DescriptorBufferBindingInfoEXT>
|
||||
{
|
||||
enum
|
||||
{
|
||||
value = true
|
||||
};
|
||||
};
|
||||
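These StructExtends specialisations are what make the new structs legal members of a StructureChain. A minimal sketch of querying VK_KHR_maintenance5 support through such a chain, assuming a valid vk::PhysicalDevice and a Vulkan 1.1 instance:

#include <vulkan/vulkan.hpp>

bool supportsMaintenance5( vk::PhysicalDevice physicalDevice )
{
  // PhysicalDeviceMaintenance5FeaturesKHR may extend PhysicalDeviceFeatures2,
  // exactly the combination declared by the specialisation above.
  vk::StructureChain<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceMaintenance5FeaturesKHR> chain;
  physicalDevice.getFeatures2( &chain.get<vk::PhysicalDeviceFeatures2>() );
  return static_cast<bool>( chain.get<vk::PhysicalDeviceMaintenance5FeaturesKHR>().maintenance5 );
}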
|
||||
//=== VK_KHR_ray_tracing_position_fetch ===
|
||||
template <>
|
||||
struct StructExtends<PhysicalDeviceRayTracingPositionFetchFeaturesKHR, PhysicalDeviceFeatures2>
|
||||
@ -13975,7 +14221,7 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
# elif defined( __APPLE__ )
|
||||
m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL );
|
||||
# elif defined( _WIN32 )
|
||||
m_library = ::LoadLibraryA( "vulkan-1.dll" );
|
||||
# else
|
||||
# error unsupported platform
|
||||
# endif
|
||||
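The platform-specific dlopen / LoadLibraryA logic above is normally driven through vk::DynamicLoader. A minimal bootstrap sketch, assuming the application is built with VULKAN_HPP_DISPATCH_LOADER_DYNAMIC=1:

#include <vulkan/vulkan.hpp>

VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE

int main()
{
  vk::DynamicLoader loader;  // opens libvulkan.so.1 / libvulkan.dylib / vulkan-1.dll as shown above
  auto getInstanceProcAddr = loader.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
  VULKAN_HPP_DEFAULT_DISPATCHER.init( getInstanceProcAddr );
  return getInstanceProcAddr ? 0 : 1;
}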
@ -14609,6 +14855,25 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0;
|
||||
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
|
||||
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
//=== VK_AMDX_shader_enqueue ===
|
||||
PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0;
|
||||
PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX = 0;
|
||||
PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX = 0;
|
||||
PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0;
|
||||
PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0;
|
||||
PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0;
|
||||
PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0;
|
||||
#else
|
||||
PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0;
|
||||
PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0;
|
||||
PFN_dummy vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0;
|
||||
PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0;
|
||||
PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0;
|
||||
PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0;
|
||||
PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0;
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
//=== VK_EXT_sample_locations ===
|
||||
PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0;
|
||||
PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0;
|
||||
@ -15093,6 +15358,12 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0;
|
||||
PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0;
|
||||
|
||||
//=== VK_KHR_maintenance5 ===
|
||||
PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR = 0;
|
||||
PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0;
|
||||
PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0;
|
||||
PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0;
|
||||
|
||||
//=== VK_EXT_shader_object ===
|
||||
PFN_vkCreateShadersEXT vkCreateShadersEXT = 0;
|
||||
PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0;
|
||||
@ -15788,6 +16059,20 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
|
||||
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
|
||||
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
//=== VK_AMDX_shader_enqueue ===
|
||||
vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetInstanceProcAddr( instance, "vkCreateExecutionGraphPipelinesAMDX" ) );
|
||||
vkGetExecutionGraphPipelineScratchSizeAMDX =
|
||||
PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetInstanceProcAddr( instance, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) );
|
||||
vkGetExecutionGraphPipelineNodeIndexAMDX =
|
||||
PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetInstanceProcAddr( instance, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) );
|
||||
vkCmdInitializeGraphScratchMemoryAMDX =
|
||||
PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetInstanceProcAddr( instance, "vkCmdInitializeGraphScratchMemoryAMDX" ) );
|
||||
vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphAMDX" ) );
|
||||
vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectAMDX" ) );
|
||||
vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectCountAMDX" ) );
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
//=== VK_EXT_sample_locations ===
|
||||
vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) );
|
||||
vkGetPhysicalDeviceMultisamplePropertiesEXT =
|
||||
@ -16092,6 +16377,8 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToImageEXT" ) );
|
||||
vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetInstanceProcAddr( instance, "vkTransitionImageLayoutEXT" ) );
|
||||
vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2EXT" ) );
|
||||
if ( !vkGetImageSubresourceLayout2KHR )
|
||||
vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT;
|
||||
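The two lines above implement a fallback alias: if only the older EXT extension is present, the KHR function pointer is filled with the EXT entry point, which shares its signature. The same idea in application code, sketched with the C API and an assumed valid device:

#include <vulkan/vulkan.h>

PFN_vkGetImageSubresourceLayout2KHR loadGetImageSubresourceLayout2( VkDevice device )
{
  auto khr = reinterpret_cast<PFN_vkGetImageSubresourceLayout2KHR>(
    vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) );
  if ( !khr )
  {
    // Fall back to the EXT spelling; both entry points take the same arguments.
    khr = reinterpret_cast<PFN_vkGetImageSubresourceLayout2KHR>(
      vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) );
  }
  return khr;
}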
|
||||
//=== VK_KHR_map_memory2 ===
|
||||
vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetInstanceProcAddr( instance, "vkMapMemory2KHR" ) );
|
||||
@ -16412,6 +16699,13 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetInstanceProcAddr( instance, "vkBindOpticalFlowSessionImageNV" ) );
|
||||
vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetInstanceProcAddr( instance, "vkCmdOpticalFlowExecuteNV" ) );
|
||||
|
||||
//=== VK_KHR_maintenance5 ===
|
||||
vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer2KHR" ) );
|
||||
vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetInstanceProcAddr( instance, "vkGetRenderingAreaGranularityKHR" ) );
|
||||
vkGetDeviceImageSubresourceLayoutKHR =
|
||||
PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSubresourceLayoutKHR" ) );
|
||||
vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2KHR" ) );
|
||||
|
||||
//=== VK_EXT_shader_object ===
|
||||
vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetInstanceProcAddr( instance, "vkCreateShadersEXT" ) );
|
||||
vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetInstanceProcAddr( instance, "vkDestroyShaderEXT" ) );
|
||||
@ -16845,6 +17139,20 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
|
||||
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
|
||||
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
//=== VK_AMDX_shader_enqueue ===
|
||||
vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) );
|
||||
vkGetExecutionGraphPipelineScratchSizeAMDX =
|
||||
PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) );
|
||||
vkGetExecutionGraphPipelineNodeIndexAMDX =
|
||||
PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) );
|
||||
vkCmdInitializeGraphScratchMemoryAMDX =
|
||||
PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, "vkCmdInitializeGraphScratchMemoryAMDX" ) );
|
||||
vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) );
|
||||
vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) );
|
||||
vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) );
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
//=== VK_EXT_sample_locations ===
|
||||
vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) );
|
||||
|
||||
@ -17109,6 +17417,8 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) );
|
||||
vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) );
|
||||
vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) );
|
||||
if ( !vkGetImageSubresourceLayout2KHR )
|
||||
vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT;
|
||||
|
||||
//=== VK_KHR_map_memory2 ===
|
||||
vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) );
|
||||
@ -17397,6 +17707,12 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) );
|
||||
vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) );
|
||||
|
||||
//=== VK_KHR_maintenance5 ===
|
||||
vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) );
|
||||
vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) );
|
||||
vkGetDeviceImageSubresourceLayoutKHR = PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) );
|
||||
vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) );
|
||||
|
||||
//=== VK_EXT_shader_object ===
|
||||
vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) );
|
||||
vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) );
|
||||
|
@ -577,8 +577,15 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,
|
||||
eAndroidHardwareBufferFormatProperties2ANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID,
|
||||
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
|
||||
ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT,
|
||||
eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT,
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
ePhysicalDeviceShaderEnqueueFeaturesAMDX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX,
|
||||
ePhysicalDeviceShaderEnqueuePropertiesAMDX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_PROPERTIES_AMDX,
|
||||
eExecutionGraphPipelineScratchSizeAMDX = VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX,
|
||||
eExecutionGraphPipelineCreateInfoAMDX = VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX,
|
||||
ePipelineShaderStageNodeCreateInfoAMDX = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX,
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT,
|
||||
ePhysicalDeviceInlineUniformBlockPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT,
|
||||
eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT,
|
||||
@ -1090,6 +1097,14 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
eOpticalFlowSessionCreatePrivateDataInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV,
|
||||
ePhysicalDeviceLegacyDitheringFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT,
|
||||
ePhysicalDevicePipelineProtectedAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT,
|
||||
ePhysicalDeviceMaintenance5FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR,
|
||||
ePhysicalDeviceMaintenance5PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR,
|
||||
eRenderingAreaInfoKHR = VK_STRUCTURE_TYPE_RENDERING_AREA_INFO_KHR,
|
||||
eDeviceImageSubresourceInfoKHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO_KHR,
|
||||
eSubresourceLayout2KHR = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR,
|
||||
eImageSubresource2KHR = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR,
|
||||
ePipelineCreateFlags2CreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR,
|
||||
eBufferUsageFlags2CreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR,
|
||||
ePhysicalDeviceRayTracingPositionFetchFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR,
|
||||
ePhysicalDeviceShaderObjectFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT,
|
||||
ePhysicalDeviceShaderObjectPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT,
|
||||
@ -1500,7 +1515,9 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
eG16B16R162Plane444UnormEXT = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT,
|
||||
eA4R4G4B4UnormPack16EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT,
|
||||
eA4B4G4R4UnormPack16EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT,
|
||||
eR16G16S105NV = VK_FORMAT_R16G16_S10_5_NV
|
||||
eR16G16S105NV = VK_FORMAT_R16G16_S10_5_NV,
|
||||
eA1B5G5R5UnormPack16KHR = VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR,
|
||||
eA8UnormKHR = VK_FORMAT_A8_UNORM_KHR
|
||||
};
|
||||
|
||||
enum class FormatFeatureFlagBits : VkFormatFeatureFlags
|
||||
@ -2115,21 +2132,24 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
|
||||
enum class BufferUsageFlagBits : VkBufferUsageFlags
|
||||
{
|
||||
eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
|
||||
eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT,
|
||||
eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,
|
||||
eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,
|
||||
eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
|
||||
eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
|
||||
eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
|
||||
eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
|
||||
eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT,
|
||||
eShaderDeviceAddress = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT,
|
||||
eVideoDecodeSrcKHR = VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR,
|
||||
eVideoDecodeDstKHR = VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR,
|
||||
eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT,
|
||||
eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT,
|
||||
eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT,
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
eExecutionGraphScratchAMDX = VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX,
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
eAccelerationStructureBuildInputReadOnlyKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR,
|
||||
eAccelerationStructureStorageKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR,
|
||||
eShaderBindingTableKHR = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR,
|
||||
@ -2158,8 +2178,11 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
BufferUsageFlagBits::eStorageTexelBuffer | BufferUsageFlagBits::eUniformBuffer | BufferUsageFlagBits::eStorageBuffer | BufferUsageFlagBits::eIndexBuffer |
|
||||
BufferUsageFlagBits::eVertexBuffer | BufferUsageFlagBits::eIndirectBuffer | BufferUsageFlagBits::eShaderDeviceAddress |
|
||||
BufferUsageFlagBits::eVideoDecodeSrcKHR | BufferUsageFlagBits::eVideoDecodeDstKHR | BufferUsageFlagBits::eTransformFeedbackBufferEXT |
|
||||
BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT | BufferUsageFlagBits::eConditionalRenderingEXT |
|
||||
BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR | BufferUsageFlagBits::eAccelerationStructureStorageKHR |
|
||||
BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT | BufferUsageFlagBits::eConditionalRenderingEXT
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
| BufferUsageFlagBits::eExecutionGraphScratchAMDX
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
| BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR | BufferUsageFlagBits::eAccelerationStructureStorageKHR |
|
||||
BufferUsageFlagBits::eShaderBindingTableKHR
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
| BufferUsageFlagBits::eVideoEncodeDstKHR | BufferUsageFlagBits::eVideoEncodeSrcKHR
|
||||
@ -3046,8 +3069,11 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
|
||||
enum class PipelineBindPoint
|
||||
{
|
||||
eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS,
|
||||
eCompute = VK_PIPELINE_BIND_POINT_COMPUTE,
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
eExecutionGraphAMDX = VK_PIPELINE_BIND_POINT_EXECUTION_GRAPH_AMDX,
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
eRayTracingKHR = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR,
|
||||
eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV,
|
||||
eSubpassShadingHUAWEI = VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI
|
||||
@ -6608,6 +6634,129 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowExecuteFlagsNV allFlags = OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints;
|
||||
};
|
||||
|
||||
//=== VK_KHR_maintenance5 ===
|
||||
|
||||
enum class PipelineCreateFlagBits2KHR : VkPipelineCreateFlags2KHR
|
||||
{
|
||||
eDisableOptimization = VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR,
|
||||
eAllowDerivatives = VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR,
|
||||
eDerivative = VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR,
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
eReserved28NV = VK_PIPELINE_CREATE_2_RESERVED_BIT_28_NV,
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR,
|
||||
eDispatchBase = VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR,
|
||||
eDeferCompile = VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_KHR,
|
||||
eCaptureStatistics = VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR,
|
||||
eCaptureInternalRepresentations = VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR,
|
||||
eFailOnPipelineCompileRequired = VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_KHR,
|
||||
eEarlyReturnOnFailure = VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT_KHR,
|
||||
eLinkTimeOptimization = VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_KHR,
|
||||
eRetainLinkTimeOptimizationInfo = VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_KHR,
|
||||
eLibrary = VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR,
|
||||
eRayTracingSkipTriangles = VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR,
|
||||
eRayTracingSkipAabbs = VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_AABBS_BIT_KHR,
|
||||
eRayTracingNoNullAnyHitShaders = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR,
|
||||
eRayTracingNoNullClosestHitShaders = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR,
|
||||
eRayTracingNoNullMissShaders = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR,
|
||||
eRayTracingNoNullIntersectionShaders = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR,
|
||||
eRayTracingShaderGroupHandleCaptureReplay = VK_PIPELINE_CREATE_2_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR,
|
||||
eIndirectBindable = VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_KHR,
|
||||
eRayTracingAllowMotion = VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_KHR,
|
||||
eRenderingFragmentShadingRateAttachment = VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
|
||||
eRenderingFragmentDensityMapAttachment = VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_KHR,
|
||||
eRayTracingOpacityMicromap = VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_KHR,
|
||||
eColorAttachmentFeedbackLoop = VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_KHR,
|
||||
eDepthStencilAttachmentFeedbackLoop = VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_KHR,
|
||||
eNoProtectedAccess = VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_KHR,
|
||||
eProtectedAccessOnly = VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_KHR,
|
||||
eDescriptorBuffer = VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_KHR
|
||||
};
|
||||
|
||||
using PipelineCreateFlags2KHR = Flags<PipelineCreateFlagBits2KHR>;
|
||||
|
||||
template <>
|
||||
struct FlagTraits<PipelineCreateFlagBits2KHR>
|
||||
{
|
||||
static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
|
||||
static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreateFlags2KHR allFlags =
|
||||
PipelineCreateFlagBits2KHR::eDisableOptimization | PipelineCreateFlagBits2KHR::eAllowDerivatives | PipelineCreateFlagBits2KHR::eDerivative
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
| PipelineCreateFlagBits2KHR::eReserved28NV
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
| PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex | PipelineCreateFlagBits2KHR::eDispatchBase | PipelineCreateFlagBits2KHR::eDeferCompile |
|
||||
PipelineCreateFlagBits2KHR::eCaptureStatistics | PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations |
|
||||
PipelineCreateFlagBits2KHR::eFailOnPipelineCompileRequired | PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure |
|
||||
PipelineCreateFlagBits2KHR::eLinkTimeOptimization | PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfo | PipelineCreateFlagBits2KHR::eLibrary |
|
||||
PipelineCreateFlagBits2KHR::eRayTracingSkipTriangles | PipelineCreateFlagBits2KHR::eRayTracingSkipAabbs |
|
||||
PipelineCreateFlagBits2KHR::eRayTracingNoNullAnyHitShaders | PipelineCreateFlagBits2KHR::eRayTracingNoNullClosestHitShaders |
|
||||
PipelineCreateFlagBits2KHR::eRayTracingNoNullMissShaders | PipelineCreateFlagBits2KHR::eRayTracingNoNullIntersectionShaders |
|
||||
PipelineCreateFlagBits2KHR::eRayTracingShaderGroupHandleCaptureReplay | PipelineCreateFlagBits2KHR::eIndirectBindable |
|
||||
PipelineCreateFlagBits2KHR::eRayTracingAllowMotion | PipelineCreateFlagBits2KHR::eRenderingFragmentShadingRateAttachment |
|
||||
PipelineCreateFlagBits2KHR::eRenderingFragmentDensityMapAttachment | PipelineCreateFlagBits2KHR::eRayTracingOpacityMicromap |
|
||||
PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoop | PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoop |
|
||||
PipelineCreateFlagBits2KHR::eNoProtectedAccess | PipelineCreateFlagBits2KHR::eProtectedAccessOnly | PipelineCreateFlagBits2KHR::eDescriptorBuffer;
|
||||
};
|
||||
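PipelineCreateFlags2KHR is consumed through VkPipelineCreateFlags2CreateInfoKHR in the pNext chain; when present, its 64-bit flags are used instead of the legacy `flags` field. A sketch in the C API; `device`, `layout` and `stage` are assumed to be valid and VK_KHR_maintenance5 enabled:

#include <vulkan/vulkan.h>

VkPipeline createUnoptimizedComputePipeline( VkDevice device, VkPipelineLayout layout, VkPipelineShaderStageCreateInfo stage )
{
  VkPipelineCreateFlags2CreateInfoKHR flags2 = {};
  flags2.sType = VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR;
  flags2.flags = VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR;

  VkComputePipelineCreateInfo ci = {};
  ci.sType  = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
  ci.pNext  = &flags2;  // 64-bit flags travel in the chain
  ci.stage  = stage;
  ci.layout = layout;

  VkPipeline pipeline = VK_NULL_HANDLE;
  vkCreateComputePipelines( device, VK_NULL_HANDLE, 1, &ci, nullptr, &pipeline );
  return pipeline;
}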
|
||||
enum class BufferUsageFlagBits2KHR : VkBufferUsageFlags2KHR
|
||||
{
|
||||
eTransferSrc = VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT_KHR,
|
||||
eTransferDst = VK_BUFFER_USAGE_2_TRANSFER_DST_BIT_KHR,
|
||||
eUniformTexelBuffer = VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR,
|
||||
eStorageTexelBuffer = VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT_KHR,
|
||||
eUniformBuffer = VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT_KHR,
|
||||
eStorageBuffer = VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT_KHR,
|
||||
eIndexBuffer = VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT_KHR,
|
||||
eVertexBuffer = VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT_KHR,
|
||||
eIndirectBuffer = VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT_KHR,
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
eExecutionGraphScratchAMDX = VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX,
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
eConditionalRendering = VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_KHR,
|
||||
eShaderBindingTable = VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR,
|
||||
eRayTracing = VK_BUFFER_USAGE_2_RAY_TRACING_BIT_KHR,
|
||||
eTransformFeedbackBuffer = VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_KHR,
|
||||
eTransformFeedbackCounterBuffer = VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_KHR,
|
||||
eVideoDecodeSrc = VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR,
|
||||
eVideoDecodeDst = VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR,
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
eVideoEncodeDst = VK_BUFFER_USAGE_2_VIDEO_ENCODE_DST_BIT_KHR,
|
||||
eVideoEncodeSrc = VK_BUFFER_USAGE_2_VIDEO_ENCODE_SRC_BIT_KHR,
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
eShaderDeviceAddress = VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT_KHR,
|
||||
eAccelerationStructureBuildInputReadOnly = VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR,
|
||||
eAccelerationStructureStorage = VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR,
|
||||
eSamplerDescriptorBuffer = VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_KHR,
|
||||
eResourceDescriptorBuffer = VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_KHR,
|
||||
ePushDescriptorsDescriptorBuffer = VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_KHR,
|
||||
eMicromapBuildInputReadOnly = VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_KHR,
|
||||
eMicromapStorage = VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_KHR
|
||||
};
|
||||
|
||||
using BufferUsageFlags2KHR = Flags<BufferUsageFlagBits2KHR>;
|
||||
|
||||
template <>
|
||||
struct FlagTraits<BufferUsageFlagBits2KHR>
|
||||
{
|
||||
static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
|
||||
static VULKAN_HPP_CONST_OR_CONSTEXPR BufferUsageFlags2KHR allFlags =
|
||||
BufferUsageFlagBits2KHR::eTransferSrc | BufferUsageFlagBits2KHR::eTransferDst | BufferUsageFlagBits2KHR::eUniformTexelBuffer |
|
||||
BufferUsageFlagBits2KHR::eStorageTexelBuffer | BufferUsageFlagBits2KHR::eUniformBuffer | BufferUsageFlagBits2KHR::eStorageBuffer |
|
||||
BufferUsageFlagBits2KHR::eIndexBuffer | BufferUsageFlagBits2KHR::eVertexBuffer | BufferUsageFlagBits2KHR::eIndirectBuffer
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
| BufferUsageFlagBits2KHR::eExecutionGraphScratchAMDX
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
| BufferUsageFlagBits2KHR::eConditionalRendering | BufferUsageFlagBits2KHR::eShaderBindingTable | BufferUsageFlagBits2KHR::eTransformFeedbackBuffer |
|
||||
BufferUsageFlagBits2KHR::eTransformFeedbackCounterBuffer | BufferUsageFlagBits2KHR::eVideoDecodeSrc | BufferUsageFlagBits2KHR::eVideoDecodeDst
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
| BufferUsageFlagBits2KHR::eVideoEncodeDst | BufferUsageFlagBits2KHR::eVideoEncodeSrc
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
| BufferUsageFlagBits2KHR::eShaderDeviceAddress | BufferUsageFlagBits2KHR::eAccelerationStructureBuildInputReadOnly |
|
||||
BufferUsageFlagBits2KHR::eAccelerationStructureStorage | BufferUsageFlagBits2KHR::eSamplerDescriptorBuffer |
|
||||
BufferUsageFlagBits2KHR::eResourceDescriptorBuffer | BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBuffer |
|
||||
BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnly | BufferUsageFlagBits2KHR::eMicromapStorage;
|
||||
};
|
||||
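BufferUsageFlags2KHR works the same way for buffers: the 64-bit usage flags are supplied via VkBufferUsageFlags2CreateInfoKHR in the pNext chain of VkBufferCreateInfo, which then replaces the legacy `usage` field. A sketch in the C API; `device` is assumed valid and VK_KHR_maintenance5 enabled:

#include <vulkan/vulkan.h>

VkBuffer createStorageBuffer( VkDevice device, VkDeviceSize size )
{
  VkBufferUsageFlags2CreateInfoKHR usage2 = {};
  usage2.sType = VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR;
  usage2.usage = VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT_KHR | VK_BUFFER_USAGE_2_TRANSFER_DST_BIT_KHR;

  VkBufferCreateInfo ci = {};
  ci.sType       = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
  ci.pNext       = &usage2;  // usage comes from the chained struct
  ci.size        = size;
  ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;

  VkBuffer buffer = VK_NULL_HANDLE;
  vkCreateBuffer( device, &ci, nullptr, &buffer );
  return buffer;
}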
|
||||
//=== VK_EXT_shader_object ===
|
||||
|
||||
enum class ShaderCreateFlagBitsEXT : VkShaderCreateFlagsEXT
|
||||
|
@ -166,6 +166,9 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
"VK_EXT_sampler_filter_minmax",
|
||||
"VK_KHR_storage_buffer_storage_class",
|
||||
"VK_AMD_gpu_shader_int16",
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
"VK_AMDX_shader_enqueue",
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
"VK_AMD_mixed_attachment_samples",
|
||||
"VK_AMD_shader_fragment_mask",
|
||||
"VK_EXT_inline_uniform_block",
|
||||
@ -380,6 +383,7 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
"VK_NV_optical_flow",
|
||||
"VK_EXT_legacy_dithering",
|
||||
"VK_EXT_pipeline_protected_access",
|
||||
"VK_KHR_maintenance5",
|
||||
"VK_KHR_ray_tracing_position_fetch",
|
||||
"VK_EXT_shader_object",
|
||||
"VK_QCOM_tile_properties",
|
||||
@ -895,6 +899,16 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
{ {
|
||||
"VK_KHR_get_physical_device_properties2",
|
||||
} } } } },
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
{ "VK_AMDX_shader_enqueue",
|
||||
{ { "VK_VERSION_1_0",
|
||||
{ {
|
||||
"VK_KHR_get_physical_device_properties2",
|
||||
"VK_KHR_synchronization2",
|
||||
"VK_KHR_pipeline_library",
|
||||
"VK_KHR_spirv_1_4",
|
||||
} } } } },
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
{ "VK_EXT_inline_uniform_block",
|
||||
{ { "VK_VERSION_1_0",
|
||||
{ {
|
||||
@ -1854,6 +1868,11 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
{ {
|
||||
"VK_KHR_get_physical_device_properties2",
|
||||
} } } } },
|
||||
{ "VK_KHR_maintenance5",
|
||||
{ { "VK_VERSION_1_1",
|
||||
{ {
|
||||
"VK_KHR_dynamic_rendering",
|
||||
} } } } },
|
||||
{ "VK_KHR_ray_tracing_position_fetch",
|
||||
{ { "VK_VERSION_1_0",
|
||||
{ {
|
||||
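The dependency table above is informational; at runtime an application still has to enumerate and enable the extension itself. A minimal sketch, assuming a valid vk::PhysicalDevice:

#include <vulkan/vulkan.hpp>
#include <cstring>

bool hasMaintenance5( vk::PhysicalDevice physicalDevice )
{
  for ( vk::ExtensionProperties const & ext : physicalDevice.enumerateDeviceExtensionProperties() )
  {
    if ( std::strcmp( ext.extensionName.data(), VK_KHR_MAINTENANCE_5_EXTENSION_NAME ) == 0 )
    {
      return true;
    }
  }
  return false;
}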
@ -2539,7 +2558,11 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
|| ( extension == "VK_ANDROID_external_memory_android_hardware_buffer" )
|
||||
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
|
||||
|| ( extension == "VK_EXT_sampler_filter_minmax" ) || ( extension == "VK_KHR_storage_buffer_storage_class" ) ||
|
||||
( extension == "VK_AMD_gpu_shader_int16" ) || ( extension == "VK_AMD_mixed_attachment_samples" ) || ( extension == "VK_AMD_shader_fragment_mask" ) ||
|
||||
( extension == "VK_AMD_gpu_shader_int16" )
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
|| ( extension == "VK_AMDX_shader_enqueue" )
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|| ( extension == "VK_AMD_mixed_attachment_samples" ) || ( extension == "VK_AMD_shader_fragment_mask" ) ||
|
||||
( extension == "VK_EXT_inline_uniform_block" ) || ( extension == "VK_EXT_shader_stencil_export" ) || ( extension == "VK_EXT_sample_locations" ) ||
|
||||
( extension == "VK_KHR_relaxed_block_layout" ) || ( extension == "VK_KHR_get_memory_requirements2" ) ||
|
||||
( extension == "VK_KHR_image_format_list" ) || ( extension == "VK_EXT_blend_operation_advanced" ) ||
|
||||
@ -2644,7 +2667,7 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
( extension == "VK_EXT_external_memory_acquire_unmodified" ) || ( extension == "VK_EXT_extended_dynamic_state3" ) ||
|
||||
( extension == "VK_EXT_subpass_merge_feedback" ) || ( extension == "VK_EXT_shader_module_identifier" ) ||
|
||||
( extension == "VK_EXT_rasterization_order_attachment_access" ) || ( extension == "VK_NV_optical_flow" ) ||
|
||||
( extension == "VK_EXT_legacy_dithering" ) || ( extension == "VK_EXT_pipeline_protected_access" ) ||
|
||||
( extension == "VK_EXT_legacy_dithering" ) || ( extension == "VK_EXT_pipeline_protected_access" ) || ( extension == "VK_KHR_maintenance5" ) ||
|
||||
( extension == "VK_KHR_ray_tracing_position_fetch" ) || ( extension == "VK_EXT_shader_object" ) || ( extension == "VK_QCOM_tile_properties" ) ||
|
||||
( extension == "VK_SEC_amigo_profiling" ) || ( extension == "VK_QCOM_multiview_per_view_viewports" ) ||
|
||||
( extension == "VK_NV_ray_tracing_invocation_reorder" ) || ( extension == "VK_EXT_mutable_descriptor_type" ) ||
|
||||
|
@ -363,6 +363,8 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 8;
|
||||
case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 8;
|
||||
case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 4;
|
||||
case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 2;
|
||||
case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return 1;
|
||||
|
||||
default: VULKAN_HPP_ASSERT( false ); return 0;
|
||||
}
|
||||
@ -620,6 +622,8 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return "PVRTC2_2BPP";
|
||||
case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return "PVRTC2_4BPP";
|
||||
case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return "32-bit";
|
||||
case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return "16-bit";
|
||||
case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return "8-bit alpha";
|
||||
|
||||
default: VULKAN_HPP_ASSERT( false ); return "";
|
||||
}
|
||||
@ -2008,6 +2012,21 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
case 1: return 16;
|
||||
default: VULKAN_HPP_ASSERT( false ); return 0;
|
||||
}
|
||||
case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR:
|
||||
switch ( component )
|
||||
{
|
||||
case 0: return 1;
|
||||
case 1: return 5;
|
||||
case 2: return 5;
|
||||
case 3: return 5;
|
||||
default: VULKAN_HPP_ASSERT( false ); return 0;
|
||||
}
|
||||
case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR:
|
||||
switch ( component )
|
||||
{
|
||||
case 0: return 8;
|
||||
default: VULKAN_HPP_ASSERT( false ); return 0;
|
||||
}
|
||||
|
||||
default: return 0;
|
||||
}
|
||||
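These switch cases feed the format-traits helpers in vulkan_format_traits.hpp, so the new formats can be introspected like any other. A small sanity sketch of the values implied by this diff (helper names assumed from the existing header):

#include <vulkan/vulkan_format_traits.hpp>
#include <cassert>

void checkNewFormatTraits()
{
  assert( vk::blockSize( vk::Format::eA8UnormKHR ) == 1 );                     // one byte per texel
  assert( vk::componentCount( vk::Format::eA1B5G5R5UnormPack16KHR ) == 4 );    // A, B, G, R
  assert( vk::componentBits( vk::Format::eA1B5G5R5UnormPack16KHR, 0 ) == 1 );  // the 1-bit alpha component
}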
@ -2265,6 +2284,8 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 4;
|
||||
case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 4;
|
||||
case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 2;
|
||||
case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 4;
|
||||
case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return 1;
|
||||
|
||||
default: return 0;
|
||||
}
|
||||
@ -4285,6 +4306,21 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
case 1: return "G";
|
||||
default: VULKAN_HPP_ASSERT( false ); return "";
|
||||
}
|
||||
case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR:
|
||||
switch ( component )
|
||||
{
|
||||
case 0: return "A";
|
||||
case 1: return "B";
|
||||
case 2: return "G";
|
||||
case 3: return "R";
|
||||
default: VULKAN_HPP_ASSERT( false ); return "";
|
||||
}
|
||||
case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR:
|
||||
switch ( component )
|
||||
{
|
||||
case 0: return "A";
|
||||
default: VULKAN_HPP_ASSERT( false ); return "";
|
||||
}
|
||||
|
||||
default: return "";
|
||||
}
|
||||
@ -6305,6 +6341,21 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
case 1: return "SINT";
|
||||
default: VULKAN_HPP_ASSERT( false ); return "";
|
||||
}
|
||||
case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR:
|
||||
switch ( component )
|
||||
{
|
||||
case 0: return "UNORM";
|
||||
case 1: return "UNORM";
|
||||
case 2: return "UNORM";
|
||||
case 3: return "UNORM";
|
||||
default: VULKAN_HPP_ASSERT( false ); return "";
|
||||
}
|
||||
case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR:
|
||||
switch ( component )
|
||||
{
|
||||
case 0: return "UNORM";
|
||||
default: VULKAN_HPP_ASSERT( false ); return "";
|
||||
}
|
||||
|
||||
default: return "";
|
||||
}
|
||||
@ -6745,6 +6796,7 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 16;
|
||||
case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 16;
|
||||
case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 16;
|
||||
case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 16;
|
||||
|
||||
default: return 0;
|
||||
}
|
||||
@ -7606,6 +7658,8 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 1;
|
||||
case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 1;
|
||||
case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 1;
|
||||
case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 1;
|
||||
case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return 1;
|
||||
|
||||
default: VULKAN_HPP_ASSERT( false ); return 0;
|
||||
}
|
||||
|
@ -13585,6 +13585,318 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
|
||||
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
//=== VK_AMDX_shader_enqueue ===
|
||||
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
|
||||
Device::createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
|
||||
uint32_t createInfoCount,
|
||||
const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX * pCreateInfos,
|
||||
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
|
||||
VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
|
||||
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
return static_cast<Result>( d.vkCreateExecutionGraphPipelinesAMDX( m_device,
|
||||
static_cast<VkPipelineCache>( pipelineCache ),
|
||||
createInfoCount,
|
||||
reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( pCreateInfos ),
|
||||
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
|
||||
reinterpret_cast<VkPipeline *>( pPipelines ) ) );
|
||||
}
|
||||
|
||||
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
template <typename PipelineAllocator, typename Dispatch>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createExecutionGraphPipelinesAMDX(
|
||||
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
|
||||
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
|
||||
Dispatch const & d ) const
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
|
||||
std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
|
||||
VkResult result = d.vkCreateExecutionGraphPipelinesAMDX(
|
||||
m_device,
|
||||
static_cast<VkPipelineCache>( pipelineCache ),
|
||||
createInfos.size(),
|
||||
reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
|
||||
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
|
||||
reinterpret_cast<VkPipeline *>( pipelines.data() ) );
|
||||
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
|
||||
VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX",
|
||||
{ VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
|
||||
|
||||
return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
|
||||
}
|
||||
|
||||
template <typename PipelineAllocator,
|
||||
typename Dispatch,
|
||||
typename B0,
|
||||
typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createExecutionGraphPipelinesAMDX(
|
||||
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
|
||||
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
|
||||
PipelineAllocator & pipelineAllocator,
|
||||
Dispatch const & d ) const
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
|
||||
std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
|
||||
VkResult result = d.vkCreateExecutionGraphPipelinesAMDX(
|
||||
m_device,
|
||||
static_cast<VkPipelineCache>( pipelineCache ),
|
||||
createInfos.size(),
|
||||
reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
|
||||
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
|
||||
reinterpret_cast<VkPipeline *>( pipelines.data() ) );
|
||||
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
|
||||
VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX",
|
||||
{ VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
|
||||
|
||||
return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
|
||||
}
|
||||
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
|
||||
Device::createExecutionGraphPipelineAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
|
||||
const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
|
||||
Dispatch const & d ) const
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
|
||||
VULKAN_HPP_NAMESPACE::Pipeline pipeline;
|
||||
VkResult result = d.vkCreateExecutionGraphPipelinesAMDX(
|
||||
m_device,
|
||||
static_cast<VkPipelineCache>( pipelineCache ),
|
||||
1,
|
||||
reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ),
|
||||
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
|
||||
reinterpret_cast<VkPipeline *>( &pipeline ) );
|
||||
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
|
||||
VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDX",
|
||||
{ VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
|
||||
|
||||
return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
|
||||
}
|
||||
|
||||
# ifndef VULKAN_HPP_NO_SMART_HANDLE
|
||||
template <typename Dispatch, typename PipelineAllocator>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
|
||||
Device::createExecutionGraphPipelinesAMDXUnique(
|
||||
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
|
||||
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
|
||||
Dispatch const & d ) const
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
|
||||
std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
|
||||
VkResult result = d.vkCreateExecutionGraphPipelinesAMDX(
|
||||
m_device,
|
||||
static_cast<VkPipelineCache>( pipelineCache ),
|
||||
createInfos.size(),
|
||||
reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
|
||||
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
|
||||
reinterpret_cast<VkPipeline *>( pipelines.data() ) );
|
||||
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
|
||||
VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique",
|
||||
{ VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
|
||||
std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
|
||||
uniquePipelines.reserve( createInfos.size() );
|
||||
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
|
||||
for ( auto const & pipeline : pipelines )
|
||||
{
|
||||
uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
|
||||
}
|
||||
return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
|
||||
static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
|
||||
}
|
||||
|
||||
template <typename Dispatch,
|
||||
typename PipelineAllocator,
|
||||
typename B0,
|
||||
typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
|
||||
Device::createExecutionGraphPipelinesAMDXUnique(
|
||||
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
|
||||
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
|
||||
PipelineAllocator & pipelineAllocator,
|
||||
Dispatch const & d ) const
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
|
||||
std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
|
||||
VkResult result = d.vkCreateExecutionGraphPipelinesAMDX(
|
||||
m_device,
|
||||
static_cast<VkPipelineCache>( pipelineCache ),
|
||||
createInfos.size(),
|
||||
reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
|
||||
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
|
||||
reinterpret_cast<VkPipeline *>( pipelines.data() ) );
|
||||
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
|
||||
VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique",
|
||||
{ VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
|
||||
std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
|
||||
uniquePipelines.reserve( createInfos.size() );
|
||||
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
|
||||
for ( auto const & pipeline : pipelines )
|
||||
{
|
||||
uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
|
||||
}
|
||||
return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
|
||||
static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
|
||||
}
|
||||
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
|
||||
Device::createExecutionGraphPipelineAMDXUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
|
||||
const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
|
||||
Dispatch const & d ) const
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
|
||||
VULKAN_HPP_NAMESPACE::Pipeline pipeline;
|
||||
VkResult result = d.vkCreateExecutionGraphPipelinesAMDX(
|
||||
m_device,
|
||||
static_cast<VkPipelineCache>( pipelineCache ),
|
||||
1,
|
||||
reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ),
|
||||
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
|
||||
reinterpret_cast<VkPipeline *>( &pipeline ) );
|
||||
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
|
||||
VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDXUnique",
|
||||
{ VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
|
||||
|
||||
return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>(
|
||||
static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
|
||||
UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
|
||||
}
|
||||
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
|
||||
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
|
||||
Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph,
|
||||
VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo,
|
||||
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
return static_cast<Result>( d.vkGetExecutionGraphPipelineScratchSizeAMDX(
|
||||
m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( pSizeInfo ) ) );
|
||||
}
|
||||
|
||||
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX>::type
|
||||
Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, Dispatch const & d ) const
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
|
||||
VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX sizeInfo;
|
||||
VkResult result = d.vkGetExecutionGraphPipelineScratchSizeAMDX(
|
||||
m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( &sizeInfo ) );
|
||||
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineScratchSizeAMDX" );
|
||||
|
||||
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), sizeInfo );
|
||||
}
|
||||
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
|
||||
Device::getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph,
|
||||
const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX * pNodeInfo,
|
||||
uint32_t * pNodeIndex,
|
||||
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
return static_cast<Result>( d.vkGetExecutionGraphPipelineNodeIndexAMDX(
|
||||
m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( pNodeInfo ), pNodeIndex ) );
|
||||
}
|
||||
|
||||
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type Device::getExecutionGraphPipelineNodeIndexAMDX(
|
||||
VULKAN_HPP_NAMESPACE::Pipeline executionGraph, const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo, Dispatch const & d ) const
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
|
||||
uint32_t nodeIndex;
|
||||
VkResult result = d.vkGetExecutionGraphPipelineNodeIndexAMDX(
|
||||
m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( &nodeInfo ), &nodeIndex );
|
||||
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineNodeIndexAMDX" );
|
||||
|
||||
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), nodeIndex );
|
||||
}
|
||||
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
|
||||
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
d.vkCmdInitializeGraphScratchMemoryAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ) );
|
||||
}
|
||||
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
|
||||
const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,
|
||||
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
d.vkCmdDispatchGraphAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) );
|
||||
}
|
||||
|
||||
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
|
||||
const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,
|
||||
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
|
||||
d.vkCmdDispatchGraphAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
|
||||
}
|
||||
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
|
||||
const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,
|
||||
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
d.vkCmdDispatchGraphIndirectAMDX(
|
||||
m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) );
|
||||
}
|
||||
|
||||
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
|
||||
const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,
|
||||
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
|
||||
d.vkCmdDispatchGraphIndirectAMDX(
|
||||
m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
|
||||
}
|
||||
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
                                                                        VULKAN_HPP_NAMESPACE::DeviceAddress countInfo,
                                                                        Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDispatchGraphIndirectCountAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), static_cast<VkDeviceAddress>( countInfo ) );
  }
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
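
  // --------------------------------------------------------------------------------------------------------------
  // Usage sketch (editor's illustration, not part of the generated header): how the VK_AMDX_shader_enqueue
  // wrappers above fit together. It assumes the usual `vk` namespace alias, exceptions enabled, a default
  // dispatcher initialized for this device (e.g. a vk::DispatchLoaderDynamic), and pre-existing `device`, `cmd`
  // (a vk::CommandBuffer in the recording state), `pipelineCache`, a filled-in
  // vk::ExecutionGraphPipelineCreateInfoAMDX `createInfo`, and `scratchAddress` (the device address of a buffer
  // at least as large as the queried scratch size); all of these names are illustrative.
  //
  //   vk::ResultValue<vk::Pipeline> created = device.createExecutionGraphPipelineAMDX( pipelineCache, createInfo );
  //   vk::Pipeline graph = created.value;  // ePipelineCompileRequiredEXT is also treated as a success code here
  //
  //   vk::ExecutionGraphPipelineScratchSizeAMDX scratch = device.getExecutionGraphPipelineScratchSizeAMDX( graph );
  //   // allocate a buffer of at least scratch.size bytes and take its device address -> scratchAddress
  //
  //   vk::PipelineShaderStageNodeCreateInfoAMDX nodeInfo;
  //   nodeInfo.pName = "main";  // entry point of the node to look up
  //   nodeInfo.index = 0;
  //   uint32_t nodeIndex = device.getExecutionGraphPipelineNodeIndexAMDX( graph, nodeInfo );
  //   // nodeIndex identifies this node in the dispatch records written for the graph
  //
  //   cmd.bindPipeline( vk::PipelineBindPoint::eExecutionGraphAMDX, graph );
  //   cmd.initializeGraphScratchMemoryAMDX( scratchAddress );
  //   vk::DispatchGraphCountInfoAMDX countInfo{};  // fill with per-node dispatch records as described in the extension spec
  //   cmd.dispatchGraphAMDX( scratchAddress, countInfo );
  // --------------------------------------------------------------------------------------------------------------
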
//=== VK_EXT_sample_locations ===
|
||||
|
||||
template <typename Dispatch>
|
||||
@ -18105,45 +18417,45 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image,
|
||||
const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT * pSubresource,
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT * pLayout,
|
||||
const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource,
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,
|
||||
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
d.vkGetImageSubresourceLayout2EXT( m_device,
|
||||
static_cast<VkImage>( image ),
|
||||
reinterpret_cast<const VkImageSubresource2EXT *>( pSubresource ),
|
||||
reinterpret_cast<VkSubresourceLayout2EXT *>( pLayout ) );
|
||||
reinterpret_cast<const VkImageSubresource2KHR *>( pSubresource ),
|
||||
reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) );
|
||||
}
|
||||
|
||||
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT Device::getImageSubresourceLayout2EXT(
|
||||
VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR Device::getImageSubresourceLayout2EXT(
|
||||
VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT layout;
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
|
||||
d.vkGetImageSubresourceLayout2EXT( m_device,
|
||||
static_cast<VkImage>( image ),
|
||||
reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
|
||||
reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
|
||||
reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
|
||||
reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
|
||||
|
||||
return layout;
|
||||
}
|
||||
|
||||
template <typename X, typename Y, typename... Z, typename Dispatch>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2EXT(
|
||||
VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
|
||||
VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
|
||||
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>();
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
|
||||
d.vkGetImageSubresourceLayout2EXT( m_device,
|
||||
static_cast<VkImage>( image ),
|
||||
reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
|
||||
reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
|
||||
reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
|
||||
reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
|
||||
|
||||
return structureChain;
|
||||
}
|
||||
@ -21936,6 +22248,133 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
}
|
||||
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
//=== VK_KHR_maintenance5 ===
|
||||
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
|
||||
VULKAN_HPP_NAMESPACE::DeviceSize offset,
|
||||
VULKAN_HPP_NAMESPACE::DeviceSize size,
|
||||
VULKAN_HPP_NAMESPACE::IndexType indexType,
|
||||
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
d.vkCmdBindIndexBuffer2KHR( m_commandBuffer,
|
||||
static_cast<VkBuffer>( buffer ),
|
||||
static_cast<VkDeviceSize>( offset ),
|
||||
static_cast<VkDeviceSize>( size ),
|
||||
static_cast<VkIndexType>( indexType ) );
|
||||
}
|
||||
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_INLINE void Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR * pRenderingAreaInfo,
|
||||
VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,
|
||||
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
d.vkGetRenderingAreaGranularityKHR(
|
||||
m_device, reinterpret_cast<const VkRenderingAreaInfoKHR *>( pRenderingAreaInfo ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
|
||||
}
|
||||
|
||||
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D
|
||||
Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
|
||||
VULKAN_HPP_NAMESPACE::Extent2D granularity;
|
||||
d.vkGetRenderingAreaGranularityKHR(
|
||||
m_device, reinterpret_cast<const VkRenderingAreaInfoKHR *>( &renderingAreaInfo ), reinterpret_cast<VkExtent2D *>( &granularity ) );
|
||||
|
||||
return granularity;
|
||||
}
|
||||
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_INLINE void Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR * pInfo,
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,
|
||||
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
d.vkGetDeviceImageSubresourceLayoutKHR(
|
||||
m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( pInfo ), reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) );
|
||||
}
|
||||
|
||||
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
|
||||
Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
|
||||
d.vkGetDeviceImageSubresourceLayoutKHR(
|
||||
m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ), reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
|
||||
|
||||
return layout;
|
||||
}
|
||||
|
||||
template <typename X, typename Y, typename... Z, typename Dispatch>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
|
||||
Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
|
||||
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
|
||||
d.vkGetDeviceImageSubresourceLayoutKHR(
|
||||
m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ), reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
|
||||
|
||||
return structureChain;
|
||||
}
|
||||
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image,
|
||||
const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource,
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,
|
||||
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
d.vkGetImageSubresourceLayout2KHR( m_device,
|
||||
static_cast<VkImage>( image ),
|
||||
reinterpret_cast<const VkImageSubresource2KHR *>( pSubresource ),
|
||||
reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) );
|
||||
}
|
||||
|
||||
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
template <typename Dispatch>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR Device::getImageSubresourceLayout2KHR(
|
||||
VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
|
||||
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
|
||||
d.vkGetImageSubresourceLayout2KHR( m_device,
|
||||
static_cast<VkImage>( image ),
|
||||
reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
|
||||
reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
|
||||
|
||||
return layout;
|
||||
}
|
||||
|
||||
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2KHR(
    VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR &    layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
    d.vkGetImageSubresourceLayout2KHR( m_device,
                                       static_cast<VkImage>( image ),
                                       reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
                                       reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );

    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
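
  // --------------------------------------------------------------------------------------------------------------
  // Usage sketch (editor's illustration, not part of the generated header): the VK_KHR_maintenance5 wrappers
  // above, assuming the usual `vk` namespace alias, exceptions, an initialized default dispatcher, and
  // pre-existing `device`, `image`, `indexBuffer` and `cmd` handles; the concrete values are illustrative only.
  //
  //   vk::ImageSubresource2KHR subresource;
  //   subresource.imageSubresource = vk::ImageSubresource( vk::ImageAspectFlagBits::eColor, 0, 0 );
  //   vk::SubresourceLayout2KHR layout = device.getImageSubresourceLayout2KHR( image, subresource );
  //
  //   vk::Format colorFormat = vk::Format::eB8G8R8A8Unorm;
  //   vk::RenderingAreaInfoKHR areaInfo;
  //   areaInfo.colorAttachmentCount    = 1;
  //   areaInfo.pColorAttachmentFormats = &colorFormat;
  //   vk::Extent2D granularity = device.getRenderingAreaGranularityKHR( areaInfo );
  //
  //   // bind the whole index buffer; the explicit size argument is what vkCmdBindIndexBuffer2KHR adds over vkCmdBindIndexBuffer
  //   cmd.bindIndexBuffer2KHR( indexBuffer, 0, vk::WholeSize, vk::IndexType::eUint32 );
  // --------------------------------------------------------------------------------------------------------------
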
//=== VK_EXT_shader_object ===
|
||||
|
||||
template <typename Dispatch>
|
||||
|
@ -830,6 +830,18 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
struct AndroidHardwareBufferFormatProperties2ANDROID;
|
||||
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
|
||||
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
//=== VK_AMDX_shader_enqueue ===
|
||||
struct PhysicalDeviceShaderEnqueueFeaturesAMDX;
|
||||
struct PhysicalDeviceShaderEnqueuePropertiesAMDX;
|
||||
struct ExecutionGraphPipelineScratchSizeAMDX;
|
||||
struct ExecutionGraphPipelineCreateInfoAMDX;
|
||||
struct DispatchGraphInfoAMDX;
|
||||
struct DispatchGraphCountInfoAMDX;
|
||||
struct PipelineShaderStageNodeCreateInfoAMDX;
|
||||
union DeviceOrHostAddressConstAMDX;
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
//=== VK_EXT_sample_locations ===
|
||||
struct SampleLocationEXT;
|
||||
struct SampleLocationsInfoEXT;
|
||||
@ -1153,8 +1165,6 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
struct HostImageLayoutTransitionInfoEXT;
|
||||
struct SubresourceHostMemcpySizeEXT;
|
||||
struct HostImageCopyDevicePerformanceQueryEXT;
|
||||
struct SubresourceLayout2EXT;
|
||||
struct ImageSubresource2EXT;
|
||||
|
||||
//=== VK_KHR_map_memory2 ===
|
||||
struct MemoryMapInfoKHR;
|
||||
@ -1599,6 +1609,18 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
//=== VK_EXT_pipeline_protected_access ===
|
||||
struct PhysicalDevicePipelineProtectedAccessFeaturesEXT;
|
||||
|
||||
  //=== VK_KHR_maintenance5 ===
  struct PhysicalDeviceMaintenance5FeaturesKHR;
  struct PhysicalDeviceMaintenance5PropertiesKHR;
  struct RenderingAreaInfoKHR;
  struct DeviceImageSubresourceInfoKHR;
  struct ImageSubresource2KHR;
  using ImageSubresource2EXT = ImageSubresource2KHR;
  struct SubresourceLayout2KHR;
  using SubresourceLayout2EXT = SubresourceLayout2KHR;
  struct PipelineCreateFlags2CreateInfoKHR;
  struct BufferUsageFlags2CreateInfoKHR;
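  // Note (editor's illustration, not part of the generated header): the former EXT spellings are now plain type
  // aliases of the KHR structs, so existing application code using them keeps compiling unchanged, e.g.
  //
  //   static_assert( std::is_same<VULKAN_HPP_NAMESPACE::ImageSubresource2EXT, VULKAN_HPP_NAMESPACE::ImageSubresource2KHR>::value, "" );
  //   static_assert( std::is_same<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT, VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>::value, "" );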
//=== VK_KHR_ray_tracing_position_fetch ===
|
||||
struct PhysicalDeviceRayTracingPositionFetchFeaturesKHR;
|
||||
|
||||
@ -5199,6 +5221,41 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
//=== VK_AMDX_shader_enqueue ===
|
||||
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
void initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
|
||||
const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
|
||||
const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
|
||||
const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
|
||||
const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
void dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
|
||||
VULKAN_HPP_NAMESPACE::DeviceAddress countInfo,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
//=== VK_EXT_sample_locations ===
|
||||
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
@ -6279,6 +6336,15 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
//=== VK_KHR_maintenance5 ===
|
||||
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
void bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
|
||||
VULKAN_HPP_NAMESPACE::DeviceSize offset,
|
||||
VULKAN_HPP_NAMESPACE::DeviceSize size,
|
||||
VULKAN_HPP_NAMESPACE::IndexType indexType,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
|
||||
//=== VK_EXT_shader_object ===
|
||||
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
@ -11024,6 +11090,94 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
|
||||
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
//=== VK_AMDX_shader_enqueue ===
|
||||
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
VULKAN_HPP_NODISCARD Result createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
|
||||
uint32_t createInfoCount,
|
||||
const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX * pCreateInfos,
|
||||
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
|
||||
VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
|
||||
createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
|
||||
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>,
|
||||
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
|
||||
typename B0 = PipelineAllocator,
|
||||
typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type = 0>
|
||||
VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
|
||||
createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
|
||||
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
|
||||
PipelineAllocator & pipelineAllocator,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
VULKAN_HPP_NODISCARD ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
|
||||
createExecutionGraphPipelineAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
|
||||
const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
# ifndef VULKAN_HPP_NO_SMART_HANDLE
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
|
||||
typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>>
|
||||
VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
|
||||
createExecutionGraphPipelinesAMDXUnique(
|
||||
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
|
||||
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
|
||||
typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>,
|
||||
typename B0 = PipelineAllocator,
|
||||
typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
|
||||
VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
|
||||
createExecutionGraphPipelinesAMDXUnique(
|
||||
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
|
||||
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
|
||||
PipelineAllocator & pipelineAllocator,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createExecutionGraphPipelineAMDXUnique(
|
||||
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
|
||||
const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo,
|
||||
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
|
||||
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
VULKAN_HPP_NODISCARD Result
|
||||
getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph,
|
||||
VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX>::type
|
||||
getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
VULKAN_HPP_NODISCARD Result getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph,
|
||||
const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX * pNodeInfo,
|
||||
uint32_t * pNodeIndex,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
VULKAN_HPP_NODISCARD typename ResultValueType<uint32_t>::type
|
||||
getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph,
|
||||
const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
//=== VK_KHR_get_memory_requirements2 ===
|
||||
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
@ -12108,19 +12262,19 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
void getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image,
|
||||
const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT * pSubresource,
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT * pLayout,
|
||||
const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource,
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
|
||||
getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image,
|
||||
const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource,
|
||||
const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
|
||||
getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image,
|
||||
const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource,
|
||||
const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
@ -12957,6 +13111,52 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
|
||||
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
||||
|
||||
//=== VK_KHR_maintenance5 ===
|
||||
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
void getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR * pRenderingAreaInfo,
|
||||
VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D
|
||||
getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
void getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR * pInfo,
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
|
||||
getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
|
||||
getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
void getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image,
|
||||
const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource,
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
|
||||
getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image,
|
||||
const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
|
||||
getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image,
|
||||
const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource,
|
||||
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
|
||||
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
|
||||
|
||||
//=== VK_EXT_shader_object ===
|
||||
|
||||
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
|
||||
|
@ -1921,6 +1921,19 @@ namespace std
|
||||
}
|
||||
};
|
||||
|
||||
  template <>
  struct hash<VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR const & bufferUsageFlags2CreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
    {
      std::size_t seed = 0;
      VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfoKHR.sType );
      VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfoKHR.pNext );
      VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfoKHR.usage );
      return seed;
    }
  };
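
  // Usage note (editor's illustration, not part of the generated header): each std::hash specialization in this
  // file folds the struct's members into a seed with VULKAN_HPP_HASH_COMBINE, so the new maintenance5 structs can
  // be used directly as keys in unordered containers. pNext is combined as a raw pointer value, i.e. chained
  // structs are not hashed transitively. A minimal sketch, assuming the usual `vk` namespace alias:
  //
  //   std::unordered_set<vk::BufferUsageFlags2CreateInfoKHR> seenUsages;  // hypothetical lookup cache
  //   vk::BufferUsageFlags2CreateInfoKHR info;
  //   info.usage = vk::BufferUsageFlagBits2KHR::eVertexBuffer | vk::BufferUsageFlagBits2KHR::eTransferDst;
  //   seenUsages.insert( info );  // uses the specialization above together with the struct's operator==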
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo>
|
||||
{
|
||||
@ -3743,6 +3756,33 @@ namespace std
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::ImageSubresource2KHR>
|
||||
{
|
||||
std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresource2KHR const & imageSubresource2KHR ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
std::size_t seed = 0;
|
||||
VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2KHR.sType );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2KHR.pNext );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2KHR.imageSubresource );
|
||||
return seed;
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR>
|
||||
{
|
||||
std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR const & deviceImageSubresourceInfoKHR ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
std::size_t seed = 0;
|
||||
VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfoKHR.sType );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfoKHR.pNext );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfoKHR.pCreateInfo );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfoKHR.pSubresource );
|
||||
return seed;
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo>
|
||||
{
|
||||
@ -4241,6 +4281,57 @@ namespace std
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR>
|
||||
{
|
||||
std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const & pipelineLibraryCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
std::size_t seed = 0;
|
||||
VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.sType );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pNext );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.libraryCount );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pLibraries );
|
||||
return seed;
|
||||
}
|
||||
};
|
||||
|
||||
# if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX>
|
||||
{
|
||||
std::size_t operator()( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & executionGraphPipelineCreateInfoAMDX ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
std::size_t seed = 0;
|
||||
VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.sType );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.pNext );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.flags );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.stageCount );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.pStages );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.pLibraryInfo );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.layout );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.basePipelineHandle );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.basePipelineIndex );
|
||||
return seed;
|
||||
}
|
||||
};
|
||||
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
# if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX>
|
||||
{
|
||||
std::size_t
|
||||
operator()( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX const & executionGraphPipelineScratchSizeAMDX ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
std::size_t seed = 0;
|
||||
VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.sType );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.pNext );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.size );
|
||||
return seed;
|
||||
}
|
||||
};
|
||||
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo>
|
||||
{
|
||||
@ -5631,19 +5722,6 @@ namespace std
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::ImageSubresource2EXT>
|
||||
{
|
||||
std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresource2EXT const & imageSubresource2EXT ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
std::size_t seed = 0;
|
||||
VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2EXT.sType );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2EXT.pNext );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2EXT.imageSubresource );
|
||||
return seed;
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR>
|
||||
{
|
||||
@ -8976,6 +9054,39 @@ namespace std
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR>
|
||||
{
|
||||
std::size_t
|
||||
operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR const & physicalDeviceMaintenance5FeaturesKHR ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
std::size_t seed = 0;
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5FeaturesKHR.sType );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5FeaturesKHR.pNext );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5FeaturesKHR.maintenance5 );
|
||||
return seed;
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR>
|
||||
{
|
||||
std::size_t
|
||||
operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR const & physicalDeviceMaintenance5PropertiesKHR ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
std::size_t seed = 0;
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.sType );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.pNext );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.earlyFragmentMultisampleCoverageAfterSampleCounting );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.earlyFragmentSampleMaskTestBeforeSampleCounting );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.depthStencilSwizzleOneSupport );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.polygonModePointSize );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.nonStrictSinglePixelWideLinesUseParallelogram );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.nonStrictWideLinesUseParallelogram );
|
||||
return seed;
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT>
|
||||
{
|
||||
@ -10335,6 +10446,42 @@ namespace std
|
||||
}
|
||||
};
|
||||
|
||||
# if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX>
|
||||
{
|
||||
std::size_t
|
||||
operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX const & physicalDeviceShaderEnqueueFeaturesAMDX ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
std::size_t seed = 0;
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.sType );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.pNext );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.shaderEnqueue );
|
||||
return seed;
|
||||
}
|
||||
};
|
||||
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
# if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX>
|
||||
{
|
||||
std::size_t
|
||||
operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX const & physicalDeviceShaderEnqueuePropertiesAMDX ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
std::size_t seed = 0;
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.sType );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.pNext );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphDepth );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphShaderOutputNodes );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphShaderPayloadSize );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphShaderPayloadCount );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.executionGraphDispatchAddressAlignment );
|
||||
return seed;
|
||||
}
|
||||
};
|
||||
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features>
|
||||
{
|
||||
@ -11543,6 +11690,19 @@ namespace std
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR>
|
||||
{
|
||||
std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR const & pipelineCreateFlags2CreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
std::size_t seed = 0;
|
||||
VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfoKHR.sType );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfoKHR.pNext );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfoKHR.flags );
|
||||
return seed;
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback>
|
||||
{
|
||||
@ -11741,20 +11901,6 @@ namespace std
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR>
|
||||
{
|
||||
std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const & pipelineLibraryCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
std::size_t seed = 0;
|
||||
VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.sType );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pNext );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.libraryCount );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pLibraries );
|
||||
return seed;
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT>
|
||||
{
|
||||
@ -11941,6 +12087,26 @@ namespace std
|
||||
}
|
||||
};
|
||||
|
||||
# if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX>
|
||||
{
|
||||
std::size_t
|
||||
operator()( VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX const & pipelineShaderStageNodeCreateInfoAMDX ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
std::size_t seed = 0;
|
||||
VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageNodeCreateInfoAMDX.sType );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageNodeCreateInfoAMDX.pNext );
|
||||
for ( const char * p = pipelineShaderStageNodeCreateInfoAMDX.pName; *p != '\0'; ++p )
|
||||
{
|
||||
VULKAN_HPP_HASH_COMBINE( seed, *p );
|
||||
}
|
||||
VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageNodeCreateInfoAMDX.index );
|
||||
return seed;
|
||||
}
|
||||
};
|
||||
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo>
|
||||
{
|
||||
@ -12868,6 +13034,23 @@ namespace std
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR>
|
||||
{
|
||||
std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR const & renderingAreaInfoKHR ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
std::size_t seed = 0;
|
||||
VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.sType );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.pNext );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.viewMask );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.colorAttachmentCount );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.pColorAttachmentFormats );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.depthAttachmentFormat );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.stencilAttachmentFormat );
|
||||
return seed;
|
||||
}
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT>
|
||||
{
|
||||
@ -13579,14 +13762,14 @@ namespace std
|
||||
};
|
||||
|
||||
template <>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>
|
||||
struct hash<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>
|
||||
{
|
||||
std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT const & subresourceLayout2EXT ) const VULKAN_HPP_NOEXCEPT
|
||||
std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR const & subresourceLayout2KHR ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
std::size_t seed = 0;
|
||||
VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2EXT.sType );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2EXT.pNext );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2EXT.subresourceLayout );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2KHR.sType );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2KHR.pNext );
|
||||
VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2KHR.subresourceLayout );
|
||||
return seed;
|
||||
}
|
||||
};
|
||||
|
@ -1068,6 +1068,20 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
|
||||
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
|
||||
|
||||
# if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
//=== VK_AMDX_shader_enqueue ===
|
||||
vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) );
|
||||
vkGetExecutionGraphPipelineScratchSizeAMDX =
|
||||
PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) );
|
||||
vkGetExecutionGraphPipelineNodeIndexAMDX =
|
||||
PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) );
|
||||
vkCmdInitializeGraphScratchMemoryAMDX =
|
||||
PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, "vkCmdInitializeGraphScratchMemoryAMDX" ) );
|
||||
vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) );
|
||||
vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) );
|
||||
vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) );
|
||||
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
//=== VK_EXT_sample_locations ===
|
||||
vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) );
|
||||
|
||||
@ -1337,6 +1351,8 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) );
|
||||
vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) );
|
||||
vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) );
|
||||
if ( !vkGetImageSubresourceLayout2KHR )
|
||||
vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT;
|
||||
|
||||
//=== VK_KHR_map_memory2 ===
|
||||
vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) );
|
||||
@ -1629,6 +1645,13 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) );
|
||||
vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) );
|
||||
|
||||
//=== VK_KHR_maintenance5 ===
|
||||
vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) );
|
||||
vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) );
|
||||
vkGetDeviceImageSubresourceLayoutKHR =
|
||||
PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) );
|
||||
vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) );
|
||||
|
||||
//=== VK_EXT_shader_object ===
|
||||
vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) );
|
||||
vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) );
|
||||
@ -2030,6 +2053,25 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0;
|
||||
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
|
||||
|
||||
# if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
//=== VK_AMDX_shader_enqueue ===
|
||||
PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0;
|
||||
PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX = 0;
|
||||
PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX = 0;
|
||||
PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0;
|
||||
PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0;
|
||||
PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0;
|
||||
PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0;
|
||||
# else
|
||||
PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0;
|
||||
PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0;
|
||||
PFN_dummy vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0;
|
||||
PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0;
|
||||
PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0;
|
||||
PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0;
|
||||
PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0;
|
||||
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
//=== VK_EXT_sample_locations ===
|
||||
PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0;
|
||||
|
||||
@ -2449,6 +2491,12 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0;
|
||||
PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0;
|
||||
|
||||
//=== VK_KHR_maintenance5 ===
|
||||
PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR = 0;
|
||||
PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0;
|
||||
PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0;
|
||||
PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0;
|
||||
|
||||
//=== VK_EXT_shader_object ===
|
||||
PFN_vkCreateShadersEXT vkCreateShadersEXT = 0;
|
||||
PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0;
|
||||
@ -3847,6 +3895,20 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info ) const;
|
||||
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
|
||||
|
||||
# if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
//=== VK_AMDX_shader_enqueue ===
|
||||
|
||||
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> createExecutionGraphPipelinesAMDX(
|
||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
|
||||
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
|
||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
|
||||
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline createExecutionGraphPipelineAMDX(
|
||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
|
||||
VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo,
|
||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
|
||||
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
//=== VK_KHR_get_memory_requirements2 ===
|
||||
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
|
||||
@ -4273,6 +4335,18 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo,
|
||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
|
||||
|
||||
//=== VK_KHR_maintenance5 ===
|
||||
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D
|
||||
getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT;
|
||||
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
|
||||
getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
|
||||
|
||||
template <typename X, typename Y, typename... Z>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
|
||||
getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
|
||||
|
||||
//=== VK_EXT_shader_object ===
|
||||
|
||||
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::ShaderEXT>
|
||||
@ -5580,6 +5654,21 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
|
||||
void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT;
|
||||
|
||||
# if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
//=== VK_AMDX_shader_enqueue ===
|
||||
|
||||
void initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch ) const VULKAN_HPP_NOEXCEPT;
|
||||
|
||||
void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
|
||||
const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT;
|
||||
|
||||
void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
|
||||
const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT;
|
||||
|
||||
void dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
|
||||
VULKAN_HPP_NAMESPACE::DeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT;
|
||||
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
//=== VK_EXT_sample_locations ===
|
||||
|
||||
void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT;
|
||||
@ -6042,6 +6131,13 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
|
||||
const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo ) const VULKAN_HPP_NOEXCEPT;
|
||||
|
||||
//=== VK_KHR_maintenance5 ===
|
||||
|
||||
void bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
|
||||
VULKAN_HPP_NAMESPACE::DeviceSize offset,
|
||||
VULKAN_HPP_NAMESPACE::DeviceSize size,
|
||||
VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT;
|
||||
|
||||
//=== VK_EXT_shader_object ===
|
||||
|
||||
void bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages,
|
||||
@ -8233,12 +8329,21 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
|
||||
//=== VK_EXT_host_image_copy ===
|
||||
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT
|
||||
getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT;
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
|
||||
getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT;
|
||||
|
||||
template <typename X, typename Y, typename... Z>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
|
||||
getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT;
|
||||
getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT;
|
||||
|
||||
//=== VK_KHR_maintenance5 ===
|
||||
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
|
||||
getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT;
|
||||
|
||||
template <typename X, typename Y, typename... Z>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
|
||||
getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT;
|
||||
|
||||
private:
|
||||
VULKAN_HPP_NAMESPACE::Device m_device = {};
|
||||
@ -9015,6 +9120,30 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
}
|
||||
}
|
||||
|
||||
# if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
|
||||
VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo,
|
||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
||||
: m_device( *device )
|
||||
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
||||
, m_dispatcher( device.getDispatcher() )
|
||||
{
|
||||
m_constructorSuccessCode = static_cast<VULKAN_HPP_NAMESPACE::Result>(
|
||||
getDispatcher()->vkCreateExecutionGraphPipelinesAMDX( static_cast<VkDevice>( *device ),
|
||||
pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
|
||||
1,
|
||||
reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ),
|
||||
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
|
||||
reinterpret_cast<VkPipeline *>( &m_pipeline ) ) );
|
||||
if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
|
||||
( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
|
||||
{
|
||||
detail::throwResultException( m_constructorSuccessCode, "vkCreateExecutionGraphPipelinesAMDX" );
|
||||
}
|
||||
}
|
||||
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
|
||||
VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo,
|
||||
@ -9189,6 +9318,14 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
VULKAN_HPP_NODISCARD std::vector<uint8_t> getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
|
||||
VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType ) const;
|
||||
|
||||
# if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
//=== VK_AMDX_shader_enqueue ===
|
||||
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX getExecutionGraphScratchSizeAMDX() const;
|
||||
|
||||
VULKAN_HPP_NODISCARD uint32_t getExecutionGraphNodeIndexAMDX( const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo ) const;
|
||||
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
//=== VK_KHR_ray_tracing_pipeline ===
|
||||
|
||||
template <typename DataType>
|
||||
@ -9256,6 +9393,36 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
}
|
||||
}
|
||||
|
||||
# if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
|
||||
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
|
||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
||||
{
|
||||
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
|
||||
std::vector<VkPipeline> pipelines( createInfos.size() );
|
||||
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateExecutionGraphPipelinesAMDX(
|
||||
static_cast<VkDevice>( *device ),
|
||||
pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
|
||||
createInfos.size(),
|
||||
reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
|
||||
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
|
||||
pipelines.data() ) );
|
||||
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
|
||||
{
|
||||
this->reserve( createInfos.size() );
|
||||
for ( auto const & pipeline : pipelines )
|
||||
{
|
||||
this->emplace_back( device, pipeline, allocator, result );
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
detail::throwResultException( result, "vkCreateExecutionGraphPipelinesAMDX" );
|
||||
}
|
||||
}
|
||||
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
|
||||
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
|
||||
@ -16434,6 +16601,95 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
}
|
||||
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
|
||||
|
||||
# if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
//=== VK_AMDX_shader_enqueue ===
|
||||
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> Device::createExecutionGraphPipelinesAMDX(
|
||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
|
||||
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
|
||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
|
||||
{
|
||||
return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator );
|
||||
}
|
||||
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createExecutionGraphPipelineAMDX(
|
||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
|
||||
VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo,
|
||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
|
||||
{
|
||||
return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator );
|
||||
}
|
||||
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX Pipeline::getExecutionGraphScratchSizeAMDX() const
|
||||
{
|
||||
VULKAN_HPP_ASSERT( getDispatcher()->vkGetExecutionGraphPipelineScratchSizeAMDX &&
|
||||
"Function <vkGetExecutionGraphPipelineScratchSizeAMDX> requires <VK_AMDX_shader_enqueue>" );
|
||||
|
||||
VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX sizeInfo;
|
||||
VkResult result = getDispatcher()->vkGetExecutionGraphPipelineScratchSizeAMDX(
|
||||
static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( &sizeInfo ) );
|
||||
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getExecutionGraphScratchSizeAMDX" );
|
||||
|
||||
return sizeInfo;
|
||||
}
|
||||
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t
|
||||
Pipeline::getExecutionGraphNodeIndexAMDX( const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo ) const
|
||||
{
|
||||
VULKAN_HPP_ASSERT( getDispatcher()->vkGetExecutionGraphPipelineNodeIndexAMDX &&
|
||||
"Function <vkGetExecutionGraphPipelineNodeIndexAMDX> requires <VK_AMDX_shader_enqueue>" );
|
||||
|
||||
uint32_t nodeIndex;
|
||||
VkResult result =
|
||||
getDispatcher()->vkGetExecutionGraphPipelineNodeIndexAMDX( static_cast<VkDevice>( m_device ),
|
||||
static_cast<VkPipeline>( m_pipeline ),
|
||||
reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( &nodeInfo ),
|
||||
&nodeIndex );
|
||||
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getExecutionGraphNodeIndexAMDX" );
|
||||
|
||||
return nodeIndex;
|
||||
}
|
||||
|
||||
VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdInitializeGraphScratchMemoryAMDX &&
|
||||
"Function <vkCmdInitializeGraphScratchMemoryAMDX> requires <VK_AMDX_shader_enqueue>" );
|
||||
|
||||
getDispatcher()->vkCmdInitializeGraphScratchMemoryAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( scratch ) );
|
||||
}
|
||||
|
||||
VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
|
||||
const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphAMDX && "Function <vkCmdDispatchGraphAMDX> requires <VK_AMDX_shader_enqueue>" );
|
||||
|
||||
getDispatcher()->vkCmdDispatchGraphAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ),
|
||||
static_cast<VkDeviceAddress>( scratch ),
|
||||
reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
|
||||
}
|
||||
|
||||
VULKAN_HPP_INLINE void
|
||||
CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
|
||||
const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphIndirectAMDX && "Function <vkCmdDispatchGraphIndirectAMDX> requires <VK_AMDX_shader_enqueue>" );
|
||||
|
||||
getDispatcher()->vkCmdDispatchGraphIndirectAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ),
|
||||
static_cast<VkDeviceAddress>( scratch ),
|
||||
reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
|
||||
}
|
||||
|
||||
VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
|
||||
VULKAN_HPP_NAMESPACE::DeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphIndirectCountAMDX &&
|
||||
"Function <vkCmdDispatchGraphIndirectCountAMDX> requires <VK_AMDX_shader_enqueue>" );
|
||||
|
||||
getDispatcher()->vkCmdDispatchGraphIndirectCountAMDX(
|
||||
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( scratch ), static_cast<VkDeviceAddress>( countInfo ) );
|
||||
}
|
||||
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
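  // Schematic usage sketch only (the names 'device', 'commandBuffer', 'createInfo',
  // 'scratchAddress' and 'countInfo' are assumptions, not part of this header) of the
  // beta VK_AMDX_shader_enqueue entry points wrapped above:
  //   vk::raii::Pipeline graph = device.createExecutionGraphPipelineAMDX( nullptr, createInfo );
  //   vk::ExecutionGraphPipelineScratchSizeAMDX scratch = graph.getExecutionGraphScratchSizeAMDX();
  //   commandBuffer.bindPipeline( vk::PipelineBindPoint::eExecutionGraphAMDX, *graph );
  //   commandBuffer.initializeGraphScratchMemoryAMDX( scratchAddress );
  //   commandBuffer.dispatchGraphAMDX( scratchAddress, countInfo );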
|
||||
|
||||
//=== VK_EXT_sample_locations ===
|
||||
|
||||
VULKAN_HPP_INLINE void
|
||||
@ -18384,34 +18640,36 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" );
|
||||
}
|
||||
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT
|
||||
Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
|
||||
Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2EXT &&
|
||||
"Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control>" );
|
||||
VULKAN_HPP_ASSERT(
|
||||
getDispatcher()->vkGetImageSubresourceLayout2EXT &&
|
||||
"Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
|
||||
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT layout;
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
|
||||
getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast<VkDevice>( m_device ),
|
||||
static_cast<VkImage>( m_image ),
|
||||
reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
|
||||
reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
|
||||
reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
|
||||
reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
|
||||
|
||||
return layout;
|
||||
}
|
||||
|
||||
template <typename X, typename Y, typename... Z>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
|
||||
Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT
|
||||
Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2EXT &&
|
||||
"Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control>" );
|
||||
VULKAN_HPP_ASSERT(
|
||||
getDispatcher()->vkGetImageSubresourceLayout2EXT &&
|
||||
"Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
|
||||
|
||||
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>();
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
|
||||
getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast<VkDevice>( m_device ),
|
||||
static_cast<VkImage>( m_image ),
|
||||
reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
|
||||
reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
|
||||
reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
|
||||
reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
|
||||
|
||||
return structureChain;
|
||||
}
|
||||
@ -20333,6 +20591,99 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) );
|
||||
}
|
||||
|
||||
//=== VK_KHR_maintenance5 ===
|
||||
|
||||
VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
|
||||
VULKAN_HPP_NAMESPACE::DeviceSize offset,
|
||||
VULKAN_HPP_NAMESPACE::DeviceSize size,
|
||||
VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindIndexBuffer2KHR && "Function <vkCmdBindIndexBuffer2KHR> requires <VK_KHR_maintenance5>" );
|
||||
|
||||
getDispatcher()->vkCmdBindIndexBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
|
||||
static_cast<VkBuffer>( buffer ),
|
||||
static_cast<VkDeviceSize>( offset ),
|
||||
static_cast<VkDeviceSize>( size ),
|
||||
static_cast<VkIndexType>( indexType ) );
|
||||
}
|
||||
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D
|
||||
Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( getDispatcher()->vkGetRenderingAreaGranularityKHR && "Function <vkGetRenderingAreaGranularityKHR> requires <VK_KHR_maintenance5>" );
|
||||
|
||||
VULKAN_HPP_NAMESPACE::Extent2D granularity;
|
||||
getDispatcher()->vkGetRenderingAreaGranularityKHR( static_cast<VkDevice>( m_device ),
|
||||
reinterpret_cast<const VkRenderingAreaInfoKHR *>( &renderingAreaInfo ),
|
||||
reinterpret_cast<VkExtent2D *>( &granularity ) );
|
||||
|
||||
return granularity;
|
||||
}
|
||||
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
|
||||
Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR &&
|
||||
"Function <vkGetDeviceImageSubresourceLayoutKHR> requires <VK_KHR_maintenance5>" );
|
||||
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
|
||||
getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR( static_cast<VkDevice>( m_device ),
|
||||
reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ),
|
||||
reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
|
||||
|
||||
return layout;
|
||||
}
|
||||
|
||||
template <typename X, typename Y, typename... Z>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
|
||||
Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR &&
|
||||
"Function <vkGetDeviceImageSubresourceLayoutKHR> requires <VK_KHR_maintenance5>" );
|
||||
|
||||
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
|
||||
getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR( static_cast<VkDevice>( m_device ),
|
||||
reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ),
|
||||
reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
|
||||
|
||||
return structureChain;
|
||||
}
|
||||
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
|
||||
Image::getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT(
|
||||
getDispatcher()->vkGetImageSubresourceLayout2KHR &&
|
||||
"Function <vkGetImageSubresourceLayout2KHR> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
|
||||
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
|
||||
getDispatcher()->vkGetImageSubresourceLayout2KHR( static_cast<VkDevice>( m_device ),
|
||||
static_cast<VkImage>( m_image ),
|
||||
reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
|
||||
reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
|
||||
|
||||
return layout;
|
||||
}
|
||||
|
||||
template <typename X, typename Y, typename... Z>
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
|
||||
Image::getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT
|
||||
{
|
||||
VULKAN_HPP_ASSERT(
|
||||
getDispatcher()->vkGetImageSubresourceLayout2KHR &&
|
||||
"Function <vkGetImageSubresourceLayout2KHR> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
|
||||
|
||||
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
|
||||
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
|
||||
getDispatcher()->vkGetImageSubresourceLayout2KHR( static_cast<VkDevice>( m_device ),
|
||||
static_cast<VkImage>( m_image ),
|
||||
reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
|
||||
reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
|
||||
|
||||
return structureChain;
|
||||
}
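  // Illustrative sketch (assumes existing RAII objects 'device', 'image', 'commandBuffer',
  // plus an 'indexBuffer' and a filled 'renderingAreaInfo'; not part of the generated code)
  // of the VK_KHR_maintenance5 queries and commands implemented above:
  //   vk::Extent2D granularity = device.getRenderingAreaGranularityKHR( renderingAreaInfo );
  //   vk::ImageSubresource2KHR subresource{ vk::ImageSubresource{ vk::ImageAspectFlagBits::eColor, 0, 0 } };
  //   vk::SubresourceLayout2KHR layout = image.getSubresourceLayout2KHR( subresource );
  //   commandBuffer.bindIndexBuffer2KHR( indexBuffer, 0, vk::WholeSize, vk::IndexType::eUint32 );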
|
||||
|
||||
//=== VK_EXT_shader_object ===
|
||||
|
||||
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::ShaderEXT>
|
||||
|
@ -3220,6 +3220,63 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPAC
|
||||
"AndroidHardwareBufferFormatProperties2ANDROID is not nothrow_move_constructible!" );
|
||||
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
|
||||
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
//=== VK_AMDX_shader_enqueue ===
|
||||
|
||||
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX ) == sizeof( VkPhysicalDeviceShaderEnqueueFeaturesAMDX ),
|
||||
"struct and wrapper have different size!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX>::value,
|
||||
"struct wrapper is not a standard layout!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX>::value,
|
||||
"PhysicalDeviceShaderEnqueueFeaturesAMDX is not nothrow_move_constructible!" );
|
||||
|
||||
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX ) == sizeof( VkPhysicalDeviceShaderEnqueuePropertiesAMDX ),
|
||||
"struct and wrapper have different size!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX>::value,
|
||||
"struct wrapper is not a standard layout!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX>::value,
|
||||
"PhysicalDeviceShaderEnqueuePropertiesAMDX is not nothrow_move_constructible!" );
|
||||
|
||||
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX ) == sizeof( VkExecutionGraphPipelineScratchSizeAMDX ),
|
||||
"struct and wrapper have different size!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX>::value,
|
||||
"struct wrapper is not a standard layout!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX>::value,
|
||||
"ExecutionGraphPipelineScratchSizeAMDX is not nothrow_move_constructible!" );
|
||||
|
||||
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX ) == sizeof( VkExecutionGraphPipelineCreateInfoAMDX ),
|
||||
"struct and wrapper have different size!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX>::value,
|
||||
"struct wrapper is not a standard layout!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX>::value,
|
||||
"ExecutionGraphPipelineCreateInfoAMDX is not nothrow_move_constructible!" );
|
||||
|
||||
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX ) == sizeof( VkDispatchGraphInfoAMDX ),
|
||||
"struct and wrapper have different size!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX>::value, "struct wrapper is not a standard layout!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX>::value,
|
||||
"DispatchGraphInfoAMDX is not nothrow_move_constructible!" );
|
||||
|
||||
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX ) == sizeof( VkDispatchGraphCountInfoAMDX ),
|
||||
"struct and wrapper have different size!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX>::value, "struct wrapper is not a standard layout!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX>::value,
|
||||
"DispatchGraphCountInfoAMDX is not nothrow_move_constructible!" );
|
||||
|
||||
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX ) == sizeof( VkPipelineShaderStageNodeCreateInfoAMDX ),
|
||||
"struct and wrapper have different size!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX>::value,
|
||||
"struct wrapper is not a standard layout!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX>::value,
|
||||
"PipelineShaderStageNodeCreateInfoAMDX is not nothrow_move_constructible!" );
|
||||
|
||||
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX ) == sizeof( VkDeviceOrHostAddressConstAMDX ),
|
||||
"struct and wrapper have different size!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX>::value, "struct wrapper is not a standard layout!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX>::value,
|
||||
"DeviceOrHostAddressConstAMDX is not nothrow_move_constructible!" );
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
|
||||
//=== VK_EXT_sample_locations ===
|
||||
|
||||
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" );
|
||||
@ -4623,17 +4680,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::HostImag
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT>::value,
|
||||
"HostImageCopyDevicePerformanceQueryEXT is not nothrow_move_constructible!" );
|
||||
|
||||
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT ) == sizeof( VkSubresourceLayout2EXT ),
|
||||
"struct and wrapper have different size!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>::value, "struct wrapper is not a standard layout!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>::value,
|
||||
"SubresourceLayout2EXT is not nothrow_move_constructible!" );
|
||||
|
||||
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource2EXT ) == sizeof( VkImageSubresource2EXT ), "struct and wrapper have different size!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresource2EXT>::value, "struct wrapper is not a standard layout!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresource2EXT>::value,
|
||||
"ImageSubresource2EXT is not nothrow_move_constructible!" );
|
||||
|
||||
//=== VK_KHR_map_memory2 ===
|
||||
|
||||
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR ) == sizeof( VkMemoryMapInfoKHR ), "struct and wrapper have different size!" );
|
||||
@ -6567,6 +6613,56 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Physical
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT>::value,
|
||||
"PhysicalDevicePipelineProtectedAccessFeaturesEXT is not nothrow_move_constructible!" );
|
||||
|
||||
//=== VK_KHR_maintenance5 ===
|
||||
|
||||
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR ) == sizeof( VkPhysicalDeviceMaintenance5FeaturesKHR ),
|
||||
"struct and wrapper have different size!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR>::value,
|
||||
"struct wrapper is not a standard layout!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR>::value,
|
||||
"PhysicalDeviceMaintenance5FeaturesKHR is not nothrow_move_constructible!" );
|
||||
|
||||
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR ) == sizeof( VkPhysicalDeviceMaintenance5PropertiesKHR ),
|
||||
"struct and wrapper have different size!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR>::value,
|
||||
"struct wrapper is not a standard layout!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR>::value,
|
||||
"PhysicalDeviceMaintenance5PropertiesKHR is not nothrow_move_constructible!" );
|
||||
|
||||
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR ) == sizeof( VkRenderingAreaInfoKHR ), "struct and wrapper have different size!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR>::value,
|
||||
"RenderingAreaInfoKHR is not nothrow_move_constructible!" );
|
||||
|
||||
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR ) == sizeof( VkDeviceImageSubresourceInfoKHR ),
|
||||
"struct and wrapper have different size!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR>::value,
|
||||
"DeviceImageSubresourceInfoKHR is not nothrow_move_constructible!" );
|
||||
|
||||
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource2KHR ) == sizeof( VkImageSubresource2KHR ), "struct and wrapper have different size!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresource2KHR>::value, "struct wrapper is not a standard layout!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresource2KHR>::value,
|
||||
"ImageSubresource2KHR is not nothrow_move_constructible!" );
|
||||
|
||||
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR ) == sizeof( VkSubresourceLayout2KHR ),
|
||||
"struct and wrapper have different size!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>::value, "struct wrapper is not a standard layout!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>::value,
|
||||
"SubresourceLayout2KHR is not nothrow_move_constructible!" );
|
||||
|
||||
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR ) == sizeof( VkPipelineCreateFlags2CreateInfoKHR ),
|
||||
"struct and wrapper have different size!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR>::value,
|
||||
"PipelineCreateFlags2CreateInfoKHR is not nothrow_move_constructible!" );
|
||||
|
||||
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR ) == sizeof( VkBufferUsageFlags2CreateInfoKHR ),
|
||||
"struct and wrapper have different size!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
||||
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR>::value,
|
||||
"BufferUsageFlags2CreateInfoKHR is not nothrow_move_constructible!" );
|
||||
|
||||
//=== VK_KHR_ray_tracing_position_fetch ===
|
||||
|
||||
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR ) ==
|
||||
|
File diff suppressed because it is too large
@ -601,6 +601,10 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
result += "TransformFeedbackCounterBufferEXT | ";
|
||||
if ( value & BufferUsageFlagBits::eConditionalRenderingEXT )
|
||||
result += "ConditionalRenderingEXT | ";
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
if ( value & BufferUsageFlagBits::eExecutionGraphScratchAMDX )
|
||||
result += "ExecutionGraphScratchAMDX | ";
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
if ( value & BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR )
|
||||
result += "AccelerationStructureBuildInputReadOnlyKHR | ";
|
||||
if ( value & BufferUsageFlagBits::eAccelerationStructureStorageKHR )
|
||||
@ -3304,6 +3308,148 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
|
||||
}
|
||||
|
||||
//=== VK_KHR_maintenance5 ===
|
||||
|
||||
VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags2KHR value )
|
||||
{
|
||||
if ( !value )
|
||||
return "{}";
|
||||
|
||||
std::string result;
|
||||
if ( value & PipelineCreateFlagBits2KHR::eDisableOptimization )
|
||||
result += "DisableOptimization | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eAllowDerivatives )
|
||||
result += "AllowDerivatives | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eDerivative )
|
||||
result += "Derivative | ";
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
if ( value & PipelineCreateFlagBits2KHR::eReserved28NV )
|
||||
result += "Reserved28NV | ";
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
if ( value & PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex )
|
||||
result += "ViewIndexFromDeviceIndex | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eDispatchBase )
|
||||
result += "DispatchBase | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eDeferCompile )
|
||||
result += "DeferCompile | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eCaptureStatistics )
|
||||
result += "CaptureStatistics | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations )
|
||||
result += "CaptureInternalRepresentations | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eFailOnPipelineCompileRequired )
|
||||
result += "FailOnPipelineCompileRequired | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure )
|
||||
result += "EarlyReturnOnFailure | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eLinkTimeOptimization )
|
||||
result += "LinkTimeOptimization | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfo )
|
||||
result += "RetainLinkTimeOptimizationInfo | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eLibrary )
|
||||
result += "Library | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eRayTracingSkipTriangles )
|
||||
result += "RayTracingSkipTriangles | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eRayTracingSkipAabbs )
|
||||
result += "RayTracingSkipAabbs | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eRayTracingNoNullAnyHitShaders )
|
||||
result += "RayTracingNoNullAnyHitShaders | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eRayTracingNoNullClosestHitShaders )
|
||||
result += "RayTracingNoNullClosestHitShaders | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eRayTracingNoNullMissShaders )
|
||||
result += "RayTracingNoNullMissShaders | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eRayTracingNoNullIntersectionShaders )
|
||||
result += "RayTracingNoNullIntersectionShaders | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eRayTracingShaderGroupHandleCaptureReplay )
|
||||
result += "RayTracingShaderGroupHandleCaptureReplay | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eIndirectBindable )
|
||||
result += "IndirectBindable | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eRayTracingAllowMotion )
|
||||
result += "RayTracingAllowMotion | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eRenderingFragmentShadingRateAttachment )
|
||||
result += "RenderingFragmentShadingRateAttachment | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eRenderingFragmentDensityMapAttachment )
|
||||
result += "RenderingFragmentDensityMapAttachment | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eRayTracingOpacityMicromap )
|
||||
result += "RayTracingOpacityMicromap | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoop )
|
||||
result += "ColorAttachmentFeedbackLoop | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoop )
|
||||
result += "DepthStencilAttachmentFeedbackLoop | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eNoProtectedAccess )
|
||||
result += "NoProtectedAccess | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eProtectedAccessOnly )
|
||||
result += "ProtectedAccessOnly | ";
|
||||
if ( value & PipelineCreateFlagBits2KHR::eDescriptorBuffer )
|
||||
result += "DescriptorBuffer | ";
|
||||
|
||||
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
|
||||
}
|
||||
|
||||
VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags2KHR value )
|
||||
{
|
||||
if ( !value )
|
||||
return "{}";
|
||||
|
||||
std::string result;
|
||||
if ( value & BufferUsageFlagBits2KHR::eTransferSrc )
|
||||
result += "TransferSrc | ";
|
||||
if ( value & BufferUsageFlagBits2KHR::eTransferDst )
|
||||
result += "TransferDst | ";
|
||||
if ( value & BufferUsageFlagBits2KHR::eUniformTexelBuffer )
|
||||
result += "UniformTexelBuffer | ";
|
||||
if ( value & BufferUsageFlagBits2KHR::eStorageTexelBuffer )
|
||||
result += "StorageTexelBuffer | ";
|
||||
if ( value & BufferUsageFlagBits2KHR::eUniformBuffer )
|
||||
result += "UniformBuffer | ";
|
||||
if ( value & BufferUsageFlagBits2KHR::eStorageBuffer )
|
||||
result += "StorageBuffer | ";
|
||||
if ( value & BufferUsageFlagBits2KHR::eIndexBuffer )
|
||||
result += "IndexBuffer | ";
|
||||
if ( value & BufferUsageFlagBits2KHR::eVertexBuffer )
|
||||
result += "VertexBuffer | ";
|
||||
if ( value & BufferUsageFlagBits2KHR::eIndirectBuffer )
|
||||
result += "IndirectBuffer | ";
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
if ( value & BufferUsageFlagBits2KHR::eExecutionGraphScratchAMDX )
|
||||
result += "ExecutionGraphScratchAMDX | ";
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
if ( value & BufferUsageFlagBits2KHR::eConditionalRendering )
|
||||
result += "ConditionalRendering | ";
|
||||
if ( value & BufferUsageFlagBits2KHR::eShaderBindingTable )
|
||||
result += "ShaderBindingTable | ";
|
||||
if ( value & BufferUsageFlagBits2KHR::eTransformFeedbackBuffer )
|
||||
result += "TransformFeedbackBuffer | ";
|
||||
if ( value & BufferUsageFlagBits2KHR::eTransformFeedbackCounterBuffer )
|
||||
result += "TransformFeedbackCounterBuffer | ";
|
||||
if ( value & BufferUsageFlagBits2KHR::eVideoDecodeSrc )
|
||||
result += "VideoDecodeSrc | ";
|
||||
if ( value & BufferUsageFlagBits2KHR::eVideoDecodeDst )
|
||||
result += "VideoDecodeDst | ";
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
if ( value & BufferUsageFlagBits2KHR::eVideoEncodeDst )
|
||||
result += "VideoEncodeDst | ";
|
||||
if ( value & BufferUsageFlagBits2KHR::eVideoEncodeSrc )
|
||||
result += "VideoEncodeSrc | ";
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
if ( value & BufferUsageFlagBits2KHR::eShaderDeviceAddress )
|
||||
result += "ShaderDeviceAddress | ";
|
||||
if ( value & BufferUsageFlagBits2KHR::eAccelerationStructureBuildInputReadOnly )
|
||||
result += "AccelerationStructureBuildInputReadOnly | ";
|
||||
if ( value & BufferUsageFlagBits2KHR::eAccelerationStructureStorage )
|
||||
result += "AccelerationStructureStorage | ";
|
||||
if ( value & BufferUsageFlagBits2KHR::eSamplerDescriptorBuffer )
|
||||
result += "SamplerDescriptorBuffer | ";
|
||||
if ( value & BufferUsageFlagBits2KHR::eResourceDescriptorBuffer )
|
||||
result += "ResourceDescriptorBuffer | ";
|
||||
if ( value & BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBuffer )
|
||||
result += "PushDescriptorsDescriptorBuffer | ";
|
||||
if ( value & BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnly )
|
||||
result += "MicromapBuildInputReadOnly | ";
|
||||
if ( value & BufferUsageFlagBits2KHR::eMicromapStorage )
|
||||
result += "MicromapStorage | ";
|
||||
|
||||
return "{ " + result.substr( 0, result.size() - 3 ) + " }";
|
||||
}
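  // Hypothetical usage of the generated to_string overloads above, e.g. for logging:
  //   vk::to_string( vk::BufferUsageFlagBits2KHR::eTransferDst | vk::BufferUsageFlagBits2KHR::eIndexBuffer )
  // returns "{ TransferDst | IndexBuffer }"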
|
||||
|
||||
//=== VK_EXT_shader_object ===
|
||||
|
||||
VULKAN_HPP_INLINE std::string to_string( ShaderCreateFlagsEXT value )
|
||||
@ -3831,6 +3977,13 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
case StructureType::eExternalFormatANDROID: return "ExternalFormatANDROID";
|
||||
case StructureType::eAndroidHardwareBufferFormatProperties2ANDROID: return "AndroidHardwareBufferFormatProperties2ANDROID";
|
||||
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
|
||||
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
||||
case StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX: return "PhysicalDeviceShaderEnqueueFeaturesAMDX";
|
||||
case StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX: return "PhysicalDeviceShaderEnqueuePropertiesAMDX";
|
||||
case StructureType::eExecutionGraphPipelineScratchSizeAMDX: return "ExecutionGraphPipelineScratchSizeAMDX";
|
||||
case StructureType::eExecutionGraphPipelineCreateInfoAMDX: return "ExecutionGraphPipelineCreateInfoAMDX";
|
||||
case StructureType::ePipelineShaderStageNodeCreateInfoAMDX: return "PipelineShaderStageNodeCreateInfoAMDX";
|
||||
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
||||
case StructureType::eSampleLocationsInfoEXT: return "SampleLocationsInfoEXT";
|
||||
case StructureType::eRenderPassSampleLocationsBeginInfoEXT: return "RenderPassSampleLocationsBeginInfoEXT";
|
||||
case StructureType::ePipelineSampleLocationsStateCreateInfoEXT: return "PipelineSampleLocationsStateCreateInfoEXT";
|
||||
@ -4110,8 +4263,6 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
case StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR: return "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR";
|
||||
case StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT: return "PhysicalDeviceImageCompressionControlFeaturesEXT";
|
||||
case StructureType::eImageCompressionControlEXT: return "ImageCompressionControlEXT";
|
||||
case StructureType::eSubresourceLayout2EXT: return "SubresourceLayout2EXT";
|
||||
case StructureType::eImageSubresource2EXT: return "ImageSubresource2EXT";
|
||||
case StructureType::eImageCompressionPropertiesEXT: return "ImageCompressionPropertiesEXT";
|
||||
case StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT: return "PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT";
|
||||
case StructureType::ePhysicalDevice4444FormatsFeaturesEXT: return "PhysicalDevice4444FormatsFeaturesEXT";
|
||||
@ -4240,6 +4391,14 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
case StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV: return "OpticalFlowSessionCreatePrivateDataInfoNV";
|
||||
case StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT: return "PhysicalDeviceLegacyDitheringFeaturesEXT";
|
||||
case StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT: return "PhysicalDevicePipelineProtectedAccessFeaturesEXT";
|
||||
case StructureType::ePhysicalDeviceMaintenance5FeaturesKHR: return "PhysicalDeviceMaintenance5FeaturesKHR";
|
||||
case StructureType::ePhysicalDeviceMaintenance5PropertiesKHR: return "PhysicalDeviceMaintenance5PropertiesKHR";
|
||||
case StructureType::eRenderingAreaInfoKHR: return "RenderingAreaInfoKHR";
|
||||
case StructureType::eDeviceImageSubresourceInfoKHR: return "DeviceImageSubresourceInfoKHR";
|
||||
case StructureType::eSubresourceLayout2KHR: return "SubresourceLayout2KHR";
|
||||
case StructureType::eImageSubresource2KHR: return "ImageSubresource2KHR";
|
||||
case StructureType::ePipelineCreateFlags2CreateInfoKHR: return "PipelineCreateFlags2CreateInfoKHR";
|
||||
case StructureType::eBufferUsageFlags2CreateInfoKHR: return "BufferUsageFlags2CreateInfoKHR";
|
||||
case StructureType::ePhysicalDeviceRayTracingPositionFetchFeaturesKHR: return "PhysicalDeviceRayTracingPositionFetchFeaturesKHR";
|
||||
case StructureType::ePhysicalDeviceShaderObjectFeaturesEXT: return "PhysicalDeviceShaderObjectFeaturesEXT";
|
||||
case StructureType::ePhysicalDeviceShaderObjectPropertiesEXT: return "PhysicalDeviceShaderObjectPropertiesEXT";
|
||||
@ -4609,6 +4768,8 @@ namespace VULKAN_HPP_NAMESPACE
|
||||
case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG";
|
||||
case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG";
|
||||
case Format::eR16G16S105NV: return "R16G16S105NV";
|
||||
case Format::eA1B5G5R5UnormPack16KHR: return "A1B5G5R5UnormPack16KHR";
|
||||
case Format::eA8UnormKHR: return "A8UnormKHR";
|
||||
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
|
||||
}
|
||||
}
|
||||
@ -5047,6 +5208,9 @@ namespace VULKAN_HPP_NAMESPACE
case BufferUsageFlagBits::eTransformFeedbackBufferEXT: return "TransformFeedbackBufferEXT";
case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT: return "TransformFeedbackCounterBufferEXT";
case BufferUsageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
case BufferUsageFlagBits::eExecutionGraphScratchAMDX: return "ExecutionGraphScratchAMDX";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
case BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR: return "AccelerationStructureBuildInputReadOnlyKHR";
case BufferUsageFlagBits::eAccelerationStructureStorageKHR: return "AccelerationStructureStorageKHR";
case BufferUsageFlagBits::eShaderBindingTableKHR: return "ShaderBindingTableKHR";
@ -5785,6 +5949,9 @@ namespace VULKAN_HPP_NAMESPACE
{
case PipelineBindPoint::eGraphics: return "Graphics";
case PipelineBindPoint::eCompute: return "Compute";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
case PipelineBindPoint::eExecutionGraphAMDX: return "ExecutionGraphAMDX";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
case PipelineBindPoint::eRayTracingKHR: return "RayTracingKHR";
case PipelineBindPoint::eSubpassShadingHUAWEI: return "SubpassShadingHUAWEI";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
@ -8517,6 +8684,87 @@ namespace VULKAN_HPP_NAMESPACE
}
}

//=== VK_KHR_maintenance5 ===

VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits2KHR value )
{
switch ( value )
{
case PipelineCreateFlagBits2KHR::eDisableOptimization: return "DisableOptimization";
case PipelineCreateFlagBits2KHR::eAllowDerivatives: return "AllowDerivatives";
case PipelineCreateFlagBits2KHR::eDerivative: return "Derivative";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
case PipelineCreateFlagBits2KHR::eReserved28NV: return "Reserved28NV";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
case PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex";
case PipelineCreateFlagBits2KHR::eDispatchBase: return "DispatchBase";
case PipelineCreateFlagBits2KHR::eDeferCompile: return "DeferCompile";
case PipelineCreateFlagBits2KHR::eCaptureStatistics: return "CaptureStatistics";
case PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations: return "CaptureInternalRepresentations";
case PipelineCreateFlagBits2KHR::eFailOnPipelineCompileRequired: return "FailOnPipelineCompileRequired";
case PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure: return "EarlyReturnOnFailure";
case PipelineCreateFlagBits2KHR::eLinkTimeOptimization: return "LinkTimeOptimization";
case PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfo: return "RetainLinkTimeOptimizationInfo";
case PipelineCreateFlagBits2KHR::eLibrary: return "Library";
case PipelineCreateFlagBits2KHR::eRayTracingSkipTriangles: return "RayTracingSkipTriangles";
case PipelineCreateFlagBits2KHR::eRayTracingSkipAabbs: return "RayTracingSkipAabbs";
case PipelineCreateFlagBits2KHR::eRayTracingNoNullAnyHitShaders: return "RayTracingNoNullAnyHitShaders";
case PipelineCreateFlagBits2KHR::eRayTracingNoNullClosestHitShaders: return "RayTracingNoNullClosestHitShaders";
case PipelineCreateFlagBits2KHR::eRayTracingNoNullMissShaders: return "RayTracingNoNullMissShaders";
case PipelineCreateFlagBits2KHR::eRayTracingNoNullIntersectionShaders: return "RayTracingNoNullIntersectionShaders";
case PipelineCreateFlagBits2KHR::eRayTracingShaderGroupHandleCaptureReplay: return "RayTracingShaderGroupHandleCaptureReplay";
case PipelineCreateFlagBits2KHR::eIndirectBindable: return "IndirectBindable";
case PipelineCreateFlagBits2KHR::eRayTracingAllowMotion: return "RayTracingAllowMotion";
case PipelineCreateFlagBits2KHR::eRenderingFragmentShadingRateAttachment: return "RenderingFragmentShadingRateAttachment";
case PipelineCreateFlagBits2KHR::eRenderingFragmentDensityMapAttachment: return "RenderingFragmentDensityMapAttachment";
case PipelineCreateFlagBits2KHR::eRayTracingOpacityMicromap: return "RayTracingOpacityMicromap";
case PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoop: return "ColorAttachmentFeedbackLoop";
case PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoop: return "DepthStencilAttachmentFeedbackLoop";
case PipelineCreateFlagBits2KHR::eNoProtectedAccess: return "NoProtectedAccess";
case PipelineCreateFlagBits2KHR::eProtectedAccessOnly: return "ProtectedAccessOnly";
case PipelineCreateFlagBits2KHR::eDescriptorBuffer: return "DescriptorBuffer";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
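For orientation, the new overload above is called like any other vk::to_string. A minimal sketch, assuming <vulkan/vulkan.hpp> is on the include path (it pulls in vulkan_to_string.hpp unless VULKAN_HPP_NO_TO_STRING is defined) and the default vk namespace is used:

#include <iostream>
#include <vulkan/vulkan.hpp>

int main()
{
  // Each single flag bit maps to its short name via the switch generated above.
  std::cout << vk::to_string( vk::PipelineCreateFlagBits2KHR::eLibrary ) << '\n';             // prints "Library"
  std::cout << vk::to_string( vk::PipelineCreateFlagBits2KHR::eDisableOptimization ) << '\n'; // prints "DisableOptimization"
}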

VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits2KHR value )
{
switch ( value )
{
case BufferUsageFlagBits2KHR::eTransferSrc: return "TransferSrc";
case BufferUsageFlagBits2KHR::eTransferDst: return "TransferDst";
case BufferUsageFlagBits2KHR::eUniformTexelBuffer: return "UniformTexelBuffer";
case BufferUsageFlagBits2KHR::eStorageTexelBuffer: return "StorageTexelBuffer";
case BufferUsageFlagBits2KHR::eUniformBuffer: return "UniformBuffer";
case BufferUsageFlagBits2KHR::eStorageBuffer: return "StorageBuffer";
case BufferUsageFlagBits2KHR::eIndexBuffer: return "IndexBuffer";
case BufferUsageFlagBits2KHR::eVertexBuffer: return "VertexBuffer";
case BufferUsageFlagBits2KHR::eIndirectBuffer: return "IndirectBuffer";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
case BufferUsageFlagBits2KHR::eExecutionGraphScratchAMDX: return "ExecutionGraphScratchAMDX";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
case BufferUsageFlagBits2KHR::eConditionalRendering: return "ConditionalRendering";
case BufferUsageFlagBits2KHR::eShaderBindingTable: return "ShaderBindingTable";
case BufferUsageFlagBits2KHR::eTransformFeedbackBuffer: return "TransformFeedbackBuffer";
case BufferUsageFlagBits2KHR::eTransformFeedbackCounterBuffer: return "TransformFeedbackCounterBuffer";
case BufferUsageFlagBits2KHR::eVideoDecodeSrc: return "VideoDecodeSrc";
case BufferUsageFlagBits2KHR::eVideoDecodeDst: return "VideoDecodeDst";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
case BufferUsageFlagBits2KHR::eVideoEncodeDst: return "VideoEncodeDst";
case BufferUsageFlagBits2KHR::eVideoEncodeSrc: return "VideoEncodeSrc";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
case BufferUsageFlagBits2KHR::eShaderDeviceAddress: return "ShaderDeviceAddress";
case BufferUsageFlagBits2KHR::eAccelerationStructureBuildInputReadOnly: return "AccelerationStructureBuildInputReadOnly";
case BufferUsageFlagBits2KHR::eAccelerationStructureStorage: return "AccelerationStructureStorage";
case BufferUsageFlagBits2KHR::eSamplerDescriptorBuffer: return "SamplerDescriptorBuffer";
case BufferUsageFlagBits2KHR::eResourceDescriptorBuffer: return "ResourceDescriptorBuffer";
case BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBuffer: return "PushDescriptorsDescriptorBuffer";
case BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnly: return "MicromapBuildInputReadOnly";
case BufferUsageFlagBits2KHR::eMicromapStorage: return "MicromapStorage";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
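The overload above handles a single BufferUsageFlagBits2KHR bit; a combined mask of type BufferUsageFlags2KHR goes through the companion flags overload that Vulkan-Hpp generates for every flags type. A short sketch of both, under the same include assumptions as the previous example:

#include <iostream>
#include <vulkan/vulkan.hpp>

int main()
{
  // A single bit is resolved by the switch above.
  std::cout << vk::to_string( vk::BufferUsageFlagBits2KHR::eIndexBuffer ) << '\n';

  // A combined mask is stringified by the BufferUsageFlags2KHR overload,
  // which lists the names of all set bits.
  vk::BufferUsageFlags2KHR usage =
    vk::BufferUsageFlagBits2KHR::eVertexBuffer | vk::BufferUsageFlagBits2KHR::eTransferDst;
  std::cout << vk::to_string( usage ) << '\n';
}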
//=== VK_EXT_shader_object ===

VULKAN_HPP_INLINE std::string to_string( ShaderCreateFlagBitsEXT value )
@ -5450,6 +5450,15 @@ namespace VULKAN_HPP_NAMESPACE
};
};

template <>
struct StructExtends<PipelineCreationFeedbackCreateInfo, ExecutionGraphPipelineCreateInfoAMDX>
{
enum
{
value = true
};
};

template <>
struct StructExtends<PhysicalDeviceShaderTerminateInvocationFeatures, PhysicalDeviceFeatures2>
{
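The StructExtends specializations above are what let vk::StructureChain validate pNext chains at compile time. A minimal sketch (guarded, since the AMDX structs are beta-only; the pairing shown is the one enabled by this change):

#include <vulkan/vulkan.hpp>

#if defined( VK_ENABLE_BETA_EXTENSIONS )
void sketchExecutionGraphChain()
{
  // Allowed because StructExtends<PipelineCreationFeedbackCreateInfo,
  // ExecutionGraphPipelineCreateInfoAMDX>::value is true; an unsupported
  // pairing would be rejected at compile time.
  vk::StructureChain<vk::ExecutionGraphPipelineCreateInfoAMDX, vk::PipelineCreationFeedbackCreateInfo> chain;

  vk::ExecutionGraphPipelineCreateInfoAMDX & createInfo = chain.get<vk::ExecutionGraphPipelineCreateInfoAMDX>();
  (void)createInfo;  // fill in stages, layout, etc. before creating the pipeline
}
#endif /*VK_ENABLE_BETA_EXTENSIONS*/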
@ -7039,7 +7048,7 @@ namespace VULKAN_HPP_NAMESPACE
# elif defined( __APPLE__ )
m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL );
# elif defined( _WIN32 )
m_library = ::LoadLibraryA( "vulkan-1.dll" );
# else
# error unsupported platform
# endif
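The branch above is the platform selection inside vk::DynamicLoader (libvulkan.dylib on Apple, vulkan-1.dll on Windows). A sketch of its typical use to bootstrap the dynamic dispatcher, assuming the project is built with VULKAN_HPP_DISPATCH_LOADER_DYNAMIC=1:

#include <vulkan/vulkan.hpp>

// Storage for the default dynamic dispatcher; required once in exactly one translation unit.
VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE

int main()
{
  vk::DynamicLoader dl;  // dlopen / LoadLibraryA, as selected by the #elif chain above
  auto vkGetInstanceProcAddr = dl.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
  VULKAN_HPP_DEFAULT_DISPATCHER.init( vkGetInstanceProcAddr );

  vk::Instance instance = vk::createInstance( {} );
  VULKAN_HPP_DEFAULT_DISPATCHER.init( instance );
}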