mirror of https://github.com/KhronosGroup/Vulkan-Hpp.git (synced 2024-10-14 16:32:17 +00:00)

Introduce implicit cast operators for vk::UniqueHandles, vk::SharedHandles and vk::raii::Handles (#1771)

This commit is contained in:
parent e4ea505027
commit 3e6fed9492
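
The hunks below all apply the same mechanical rewrite: wherever a sample dereferenced a vk::raii (or Unique/Shared) handle with * just to hand the underlying vk:: handle to an API call or create-info struct, the wrapper is now passed directly and the newly introduced implicit cast operator performs the conversion. A minimal sketch of the mechanism for one handle type follows; the class body and member name are illustrative only, since the real operators are generated into the Vulkan-Hpp headers for every handle type:

// Minimal sketch (illustrative, not the generated code): a raii wrapper that
// used to expose its native handle only through operator*() additionally
// gains an implicit conversion operator, so call sites may pass the wrapper
// wherever the plain handle type is expected.
namespace vk::raii
{
  class Semaphore
  {
  public:
    // pre-existing explicit access, still available
    vk::Semaphore const & operator*() const VULKAN_HPP_NOEXCEPT
    {
      return m_semaphore;
    }

    // new with this commit: implicit cast to the wrapped handle
    operator vk::Semaphore() const VULKAN_HPP_NOEXCEPT
    {
      return m_semaphore;
    }

  private:
    vk::Semaphore m_semaphore;  // illustrative member name
  };
}  // namespace vk::raii

With that operator in place, acquireNextImage( vk::su::FenceTimeout, *imageAcquiredSemaphore ) and acquireNextImage( vk::su::FenceTimeout, imageAcquiredSemaphore ) are equivalent, which is exactly the substitution the hunks perform. Some dereferences survive (e.g. *descriptorSetLayout inside braced initializer lists and the vk::SubmitInfo constructor arguments), presumably because those spots bind through ArrayProxy-style parameters where the conversion would have to produce a temporary.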
@@ -43,7 +43,7 @@ int main( int /*argc*/, char ** /*argv*/ )
   vk::raii::CommandPool commandPool( device, commandPoolCreateInfo );

   // allocate a CommandBuffer from the CommandPool
-  vk::CommandBufferAllocateInfo commandBufferAllocateInfo( *commandPool, vk::CommandBufferLevel::ePrimary, 1 );
+  vk::CommandBufferAllocateInfo commandBufferAllocateInfo( commandPool, vk::CommandBufferLevel::ePrimary, 1 );
   vk::raii::CommandBuffer commandBuffer = std::move( vk::raii::CommandBuffers( device, commandBufferAllocateInfo ).front() );

   /* VULKAN_HPP_KEY_END */
@@ -47,7 +47,7 @@ int main( int /*argc*/, char ** /*argv*/ )

   // determine a queueFamilyIndex that suports present
   // first check if the graphicsQueueFamiliyIndex is good enough
-  uint32_t presentQueueFamilyIndex = physicalDevice.getSurfaceSupportKHR( graphicsQueueFamilyIndex, *surface )
+  uint32_t presentQueueFamilyIndex = physicalDevice.getSurfaceSupportKHR( graphicsQueueFamilyIndex, surface )
                                        ? graphicsQueueFamilyIndex
                                        : vk::su::checked_cast<uint32_t>( queueFamilyProperties.size() );
   if ( presentQueueFamilyIndex == queueFamilyProperties.size() )
@@ -57,7 +57,7 @@ int main( int /*argc*/, char ** /*argv*/ )
     for ( size_t i = 0; i < queueFamilyProperties.size(); i++ )
     {
       if ( ( queueFamilyProperties[i].queueFlags & vk::QueueFlagBits::eGraphics ) &&
-           physicalDevice.getSurfaceSupportKHR( vk::su::checked_cast<uint32_t>( i ), *surface ) )
+           physicalDevice.getSurfaceSupportKHR( vk::su::checked_cast<uint32_t>( i ), surface ) )
       {
         graphicsQueueFamilyIndex = vk::su::checked_cast<uint32_t>( i );
         presentQueueFamilyIndex = graphicsQueueFamilyIndex;
@@ -70,7 +70,7 @@ int main( int /*argc*/, char ** /*argv*/ )
     // family index that supports present
     for ( size_t i = 0; i < queueFamilyProperties.size(); i++ )
     {
-      if ( physicalDevice.getSurfaceSupportKHR( vk::su::checked_cast<uint32_t>( i ), *surface ) )
+      if ( physicalDevice.getSurfaceSupportKHR( vk::su::checked_cast<uint32_t>( i ), surface ) )
       {
         presentQueueFamilyIndex = vk::su::checked_cast<uint32_t>( i );
         break;
@@ -87,11 +87,11 @@ int main( int /*argc*/, char ** /*argv*/ )
   vk::raii::Device device = vk::raii::su::makeDevice( physicalDevice, graphicsQueueFamilyIndex, vk::su::getDeviceExtensions() );

   // get the supported VkFormats
-  std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( *surface );
+  std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );
   assert( !formats.empty() );
   vk::Format format = ( formats[0].format == vk::Format::eUndefined ) ? vk::Format::eB8G8R8A8Unorm : formats[0].format;

-  vk::SurfaceCapabilitiesKHR surfaceCapabilities = physicalDevice.getSurfaceCapabilitiesKHR( *surface );
+  vk::SurfaceCapabilitiesKHR surfaceCapabilities = physicalDevice.getSurfaceCapabilitiesKHR( surface );
   vk::Extent2D swapchainExtent;
   if ( surfaceCapabilities.currentExtent.width == std::numeric_limits<uint32_t>::max() )
   {
@@ -119,7 +119,7 @@ int main( int /*argc*/, char ** /*argv*/ )
                                                       : vk::CompositeAlphaFlagBitsKHR::eOpaque;

   vk::SwapchainCreateInfoKHR swapChainCreateInfo( vk::SwapchainCreateFlagsKHR(),
-                                                  *surface,
+                                                  surface,
                                                   vk::su::clamp( 3u, surfaceCapabilities.minImageCount, surfaceCapabilities.maxImageCount ),
                                                   format,
                                                   vk::ColorSpaceKHR::eSrgbNonlinear,
@@ -87,9 +87,9 @@ int main( int /*argc*/, char ** /*argv*/ )

   vk::MemoryAllocateInfo memoryAllocateInfo( memoryRequirements.size, typeIndex );
   vk::raii::DeviceMemory depthMemory( device, memoryAllocateInfo );
-  depthImage.bindMemory( *depthMemory, 0 );
+  depthImage.bindMemory( depthMemory, 0 );

-  vk::ImageViewCreateInfo imageViewCreateInfo( {}, *depthImage, vk::ImageViewType::e2D, depthFormat, {}, { vk::ImageAspectFlagBits::eDepth, 0, 1, 0, 1 } );
+  vk::ImageViewCreateInfo imageViewCreateInfo( {}, depthImage, vk::ImageViewType::e2D, depthFormat, {}, { vk::ImageAspectFlagBits::eDepth, 0, 1, 0, 1 } );
   vk::raii::ImageView depthView( device, imageViewCreateInfo );

   // while all vk::raii objects are automatically destroyed on scope leave, the Image should to be destroyed before the bound DeviceMemory
@@ -76,7 +76,7 @@ int main( int /*argc*/, char ** /*argv*/ )
   memcpy( pData, &mvpc, sizeof( mvpc ) );
   uniformDataMemory.unmapMemory();

-  uniformDataBuffer.bindMemory( *uniformDataMemory, 0 );
+  uniformDataBuffer.bindMemory( uniformDataMemory, 0 );

   // while all vk::raii objects are automatically destroyed on scope leave, the Buffer should to be destroyed before the bound DeviceMemory
   // but the standard destruction order would destroy the DeviceMemory before the Buffer, so destroy the Buffer here
@@ -64,11 +64,11 @@ int main( int /*argc*/, char ** /*argv*/ )
   vk::raii::DescriptorPool descriptorPool( device, descriptorPoolCreateInfo );

   // allocate a descriptor set
-  vk::DescriptorSetAllocateInfo descriptorSetAllocateInfo( *descriptorPool, *descriptorSetLayout );
+  vk::DescriptorSetAllocateInfo descriptorSetAllocateInfo( descriptorPool, *descriptorSetLayout );
   vk::raii::DescriptorSet descriptorSet = std::move( vk::raii::DescriptorSets( device, descriptorSetAllocateInfo ).front() );

-  vk::DescriptorBufferInfo descriptorBufferInfo( *uniformBufferData.buffer, 0, sizeof( glm::mat4x4 ) );
+  vk::DescriptorBufferInfo descriptorBufferInfo( uniformBufferData.buffer, 0, sizeof( glm::mat4x4 ) );
-  vk::WriteDescriptorSet writeDescriptorSet( *descriptorSet, 0, 0, vk::DescriptorType::eUniformBuffer, {}, descriptorBufferInfo );
+  vk::WriteDescriptorSet writeDescriptorSet( descriptorSet, 0, 0, vk::DescriptorType::eUniformBuffer, {}, descriptorBufferInfo );
   device.updateDescriptorSets( writeDescriptorSet, nullptr );

   /* VULKAN_HPP_KEY_END */
@@ -51,7 +51,7 @@ int main( int /*argc*/, char ** /*argv*/ )
     vk::raii::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, surfaceData.surface );
   vk::raii::Device device = vk::raii::su::makeDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() );

-  vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( *surfaceData.surface ) ).format;
+  vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface ) ).format;
   vk::Format depthFormat = vk::Format::eD16Unorm;

   /* VULKAN_HPP_KEY_START */
@@ -55,14 +55,14 @@ int main( int /*argc*/, char ** /*argv*/ )
   /* VULKAN_KEY_START */

   std::array<vk::ImageView, 2> attachments;
-  attachments[1] = *depthBufferData.imageView;
+  attachments[1] = depthBufferData.imageView;

   std::vector<vk::raii::Framebuffer> framebuffers;
   framebuffers.reserve( swapChainData.imageViews.size() );
   for ( auto const & view : swapChainData.imageViews )
   {
-    attachments[0] = *view;
+    attachments[0] = view;
-    vk::FramebufferCreateInfo framebufferCreateInfo( {}, *renderPass, attachments, surfaceData.extent.width, surfaceData.extent.height, 1 );
+    vk::FramebufferCreateInfo framebufferCreateInfo( {}, renderPass, attachments, surfaceData.extent.width, surfaceData.extent.height, 1 );
     framebuffers.push_back( vk::raii::Framebuffer( device, framebufferCreateInfo ) );
   }

@@ -94,13 +94,13 @@ int main( int /*argc*/, char ** /*argv*/ )
   deviceMemory.unmapMemory();

   // and bind the device memory to the vertex buffer
-  vertexBuffer.bindMemory( *deviceMemory, 0 );
+  vertexBuffer.bindMemory( deviceMemory, 0 );

   vk::raii::Semaphore imageAcquiredSemaphore( device, vk::SemaphoreCreateInfo() );

   vk::Result result;
   uint32_t imageIndex;
-  std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, *imageAcquiredSemaphore );
+  std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, imageAcquiredSemaphore );
   assert( result == vk::Result::eSuccess );
   assert( imageIndex < swapChainData.images.size() );

@@ -110,10 +110,10 @@ int main( int /*argc*/, char ** /*argv*/ )

   commandBuffer.begin( {} );

-  vk::RenderPassBeginInfo renderPassBeginInfo( *renderPass, *framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValues );
+  vk::RenderPassBeginInfo renderPassBeginInfo( renderPass, framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValues );
   commandBuffer.beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline );

-  commandBuffer.bindVertexBuffers( 0, { *vertexBuffer }, { 0 } );
+  commandBuffer.bindVertexBuffers( 0, { vertexBuffer }, { 0 } );

   commandBuffer.endRenderPass();
   commandBuffer.end();
@@ -53,7 +53,7 @@ int main( int /*argc*/, char ** /*argv*/ )
     vk::raii::su::findGraphicsAndPresentQueueFamilyIndex( physicalDevice, surfaceData.surface );
   vk::raii::Device device = vk::raii::su::makeDevice( physicalDevice, graphicsAndPresentQueueFamilyIndex.first, vk::su::getDeviceExtensions() );

-  vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( *surfaceData.surface ) ).format;
+  vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface ) ).format;
   vk::raii::RenderPass renderPass = vk::raii::su::makeRenderPass( device, colorFormat, vk::Format::eD16Unorm );

   vk::raii::DescriptorSetLayout descriptorSetLayout =
@@ -68,8 +68,8 @@ int main( int /*argc*/, char ** /*argv*/ )
   /* VULKAN_KEY_START */

   std::array<vk::PipelineShaderStageCreateInfo, 2> pipelineShaderStageCreateInfos = {
-    vk::PipelineShaderStageCreateInfo( {}, vk::ShaderStageFlagBits::eVertex, *vertexShaderModule, "main" ),
+    vk::PipelineShaderStageCreateInfo( {}, vk::ShaderStageFlagBits::eVertex, vertexShaderModule, "main" ),
-    vk::PipelineShaderStageCreateInfo( {}, vk::ShaderStageFlagBits::eFragment, *fragmentShaderModule, "main" )
+    vk::PipelineShaderStageCreateInfo( {}, vk::ShaderStageFlagBits::eFragment, fragmentShaderModule, "main" )
   };

   vk::VertexInputBindingDescription vertexInputBindingDescription( 0, sizeof( coloredCubeData[0] ) );
@@ -147,8 +147,8 @@ int main( int /*argc*/, char ** /*argv*/ )
                                                        &pipelineDepthStencilStateCreateInfo,  // pDepthStencilState
                                                        &pipelineColorBlendStateCreateInfo,    // pColorBlendState
                                                        &pipelineDynamicStateCreateInfo,       // pDynamicState
-                                                       *pipelineLayout,                       // layout
+                                                       pipelineLayout,                        // layout
-                                                       *renderPass                            // renderPass
+                                                       renderPass                             // renderPass
   );

   vk::raii::Pipeline pipeline( device, nullptr, graphicsPipelineCreateInfo );
@@ -79,7 +79,7 @@ int main( int /*argc*/, char ** /*argv*/ )
     vk::raii::su::makeDescriptorSetLayout( device, { { vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex } } );
   vk::raii::PipelineLayout pipelineLayout( device, { {}, *descriptorSetLayout } );

-  vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( *surfaceData.surface ) ).format;
+  vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface ) ).format;
   vk::raii::RenderPass renderPass = vk::raii::su::makeRenderPass( device, colorFormat, depthBufferData.format );

   glslang::InitializeProcess();
@@ -94,7 +94,7 @@ int main( int /*argc*/, char ** /*argv*/ )
   vk::raii::su::copyToDevice( vertexBufferData.deviceMemory, coloredCubeData, sizeof( coloredCubeData ) / sizeof( coloredCubeData[0] ) );

   vk::raii::DescriptorPool descriptorPool = vk::raii::su::makeDescriptorPool( device, { { vk::DescriptorType::eUniformBuffer, 1 } } );
-  vk::raii::DescriptorSet descriptorSet = std::move( vk::raii::DescriptorSets( device, { *descriptorPool, *descriptorSetLayout } ).front() );
+  vk::raii::DescriptorSet descriptorSet = std::move( vk::raii::DescriptorSets( device, { descriptorPool, *descriptorSetLayout } ).front() );
   vk::raii::su::updateDescriptorSets(
     device, descriptorSet, { { vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, VK_WHOLE_SIZE, nullptr } }, {} );

@@ -120,7 +120,7 @@ int main( int /*argc*/, char ** /*argv*/ )

   vk::Result result;
   uint32_t imageIndex;
-  std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, *imageAcquiredSemaphore );
+  std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, imageAcquiredSemaphore );
   assert( result == vk::Result::eSuccess );
   assert( imageIndex < swapChainData.images.size() );

@@ -129,12 +129,12 @@ int main( int /*argc*/, char ** /*argv*/ )
   std::array<vk::ClearValue, 2> clearValues;
   clearValues[0].color = vk::ClearColorValue( 0.2f, 0.2f, 0.2f, 0.2f );
   clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 );
-  vk::RenderPassBeginInfo renderPassBeginInfo( *renderPass, *framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValues );
+  vk::RenderPassBeginInfo renderPassBeginInfo( renderPass, framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValues );
   commandBuffer.beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline );
-  commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, *graphicsPipeline );
+  commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline );
-  commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, *pipelineLayout, 0, { *descriptorSet }, nullptr );
+  commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, { descriptorSet }, nullptr );

-  commandBuffer.bindVertexBuffers( 0, { *vertexBufferData.buffer }, { 0 } );
+  commandBuffer.bindVertexBuffers( 0, { vertexBufferData.buffer }, { 0 } );
   commandBuffer.setViewport(
     0, vk::Viewport( 0.0f, 0.0f, static_cast<float>( surfaceData.extent.width ), static_cast<float>( surfaceData.extent.height ), 0.0f, 1.0f ) );
   commandBuffer.setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) );
@@ -149,7 +149,7 @@ int main( int /*argc*/, char ** /*argv*/ )
   vk::SubmitInfo submitInfo( *imageAcquiredSemaphore, waitDestinationStageMask, *commandBuffer );
   graphicsQueue.submit( submitInfo, *drawFence );

-  while ( vk::Result::eTimeout == device.waitForFences( { *drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
+  while ( vk::Result::eTimeout == device.waitForFences( { drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
     ;

   vk::PresentInfoKHR presentInfoKHR( nullptr, *swapChainData.swapChain, imageIndex );
@@ -35,7 +35,7 @@ int main( int /*argc*/, char ** /*argv*/ )

   vk::raii::su::SurfaceData surfaceData( instance, AppName, vk::Extent2D( 640, 640 ) );

-  vk::SurfaceCapabilitiesKHR surfaceCapabilities = physicalDevice.getSurfaceCapabilitiesKHR( *surfaceData.surface );
+  vk::SurfaceCapabilitiesKHR surfaceCapabilities = physicalDevice.getSurfaceCapabilitiesKHR( surfaceData.surface );
   if ( !( surfaceCapabilities.supportedUsageFlags & vk::ImageUsageFlagBits::eTransferDst ) )
   {
     std::cout << "Surface cannot be destination of blit - abort \n";
@@ -73,7 +73,7 @@ int main( int /*argc*/, char ** /*argv*/ )
   // Get the index of the next available swapchain image:
   vk::Result result;
   uint32_t imageIndex;
-  std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, *imageAcquiredSemaphore );
+  std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, imageAcquiredSemaphore );
   assert( result == vk::Result::eSuccess );
   assert( imageIndex < swapChainData.images.size() );

@@ -105,9 +105,9 @@ int main( int /*argc*/, char ** /*argv*/ )

   vk::MemoryAllocateInfo memoryAllocateInfo( memoryRequirements.size, memoryTypeIndex );
   deviceMemory = vk::raii::DeviceMemory( device, memoryAllocateInfo );
-  blitSourceImage.bindMemory( *deviceMemory, 0 );
+  blitSourceImage.bindMemory( deviceMemory, 0 );

-  vk::raii::su::setImageLayout( commandBuffer, *blitSourceImage, swapChainData.colorFormat, vk::ImageLayout::eUndefined, vk::ImageLayout::eGeneral );
+  vk::raii::su::setImageLayout( commandBuffer, blitSourceImage, swapChainData.colorFormat, vk::ImageLayout::eUndefined, vk::ImageLayout::eGeneral );

   commandBuffer.end();

@@ -118,7 +118,7 @@ int main( int /*argc*/, char ** /*argv*/ )
   graphicsQueue.submit( submitInfo, *commandFence );

   /* Make sure command buffer is finished before mapping */
-  while ( device.waitForFences( { *commandFence }, true, vk::su::FenceTimeout ) == vk::Result::eTimeout )
+  while ( device.waitForFences( { commandFence }, true, vk::su::FenceTimeout ) == vk::Result::eTimeout )
     ;

   unsigned char * pImageMemory = static_cast<unsigned char *>( deviceMemory.mapMemory( 0, memoryRequirements.size ) );
@@ -138,7 +138,7 @@ int main( int /*argc*/, char ** /*argv*/ )
   }

   // Flush the mapped memory and then unmap it. Assume it isn't coherent since we didn't really confirm
-  vk::MappedMemoryRange mappedMemoryRange( *deviceMemory, 0, memoryRequirements.size );
+  vk::MappedMemoryRange mappedMemoryRange( deviceMemory, 0, memoryRequirements.size );
   device.flushMappedMemoryRanges( mappedMemoryRange );
   deviceMemory.unmapMemory();

@@ -148,7 +148,7 @@ int main( int /*argc*/, char ** /*argv*/ )
   commandBuffer.begin( vk::CommandBufferBeginInfo() );

   // Intend to blit from this image, set the layout accordingly
-  vk::raii::su::setImageLayout( commandBuffer, *blitSourceImage, swapChainData.colorFormat, vk::ImageLayout::eGeneral, vk::ImageLayout::eTransferSrcOptimal );
+  vk::raii::su::setImageLayout( commandBuffer, blitSourceImage, swapChainData.colorFormat, vk::ImageLayout::eGeneral, vk::ImageLayout::eTransferSrcOptimal );

   vk::Image blitDestinationImage = static_cast<vk::Image>( swapChainData.images[imageIndex] );

@@ -159,7 +159,7 @@ int main( int /*argc*/, char ** /*argv*/ )
                            imageSubresourceLayers,
                            { { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( surfaceData.extent.width, surfaceData.extent.height, 1 ) } } );
   commandBuffer.blitImage(
-    *blitSourceImage, vk::ImageLayout::eTransferSrcOptimal, blitDestinationImage, vk::ImageLayout::eTransferDstOptimal, imageBlit, vk::Filter::eLinear );
+    blitSourceImage, vk::ImageLayout::eTransferSrcOptimal, blitDestinationImage, vk::ImageLayout::eTransferDstOptimal, imageBlit, vk::Filter::eLinear );

   // Use a barrier to make sure the blit is finished before the copy starts
   // Note: for a layout of vk::ImageLayout::eTransferDstOptimal, the access mask is supposed to be vk::AccessFlagBits::eTransferWrite
@@ -175,7 +175,7 @@ int main( int /*argc*/, char ** /*argv*/ )

   // Do a image copy to part of the dst image - checks should stay small
   vk::ImageCopy imageCopy( imageSubresourceLayers, vk::Offset3D(), imageSubresourceLayers, vk::Offset3D( 256, 256, 0 ), vk::Extent3D( 128, 128, 1 ) );
-  commandBuffer.copyImage( *blitSourceImage, vk::ImageLayout::eTransferSrcOptimal, blitDestinationImage, vk::ImageLayout::eTransferDstOptimal, imageCopy );
+  commandBuffer.copyImage( blitSourceImage, vk::ImageLayout::eTransferSrcOptimal, blitDestinationImage, vk::ImageLayout::eTransferDstOptimal, imageCopy );

   // Note: for a layout of vk::ImageLayout::ePresentSrcKHR, the access mask is supposed to be empty
   vk::ImageMemoryBarrier prePresentBarrier( vk::AccessFlagBits::eTransferWrite,
@@ -191,11 +191,11 @@ int main( int /*argc*/, char ** /*argv*/ )

   vk::raii::Fence drawFence( device, vk::FenceCreateInfo() );
   submitInfo = vk::SubmitInfo( {}, {}, *commandBuffer );
-  graphicsQueue.submit( submitInfo, *drawFence );
+  graphicsQueue.submit( submitInfo, drawFence );
   graphicsQueue.waitIdle();

   /* Make sure command buffer is finished before presenting */
-  while ( device.waitForFences( { *drawFence }, true, vk::su::FenceTimeout ) == vk::Result::eTimeout )
+  while ( device.waitForFences( { drawFence }, true, vk::su::FenceTimeout ) == vk::Result::eTimeout )
     ;

   /* Now present the image in the window */
@@ -47,7 +47,8 @@ int main( int /*argc*/, char ** /*argv*/ )

   /* VULKAN_KEY_START */

-  vk::DebugUtilsObjectNameInfoEXT debugUtilsObjectNameInfo( vk::ObjectType::eImage, NON_DISPATCHABLE_HANDLE_TO_UINT64_CAST( VkImage, *image ), "Image name" );
+  vk::DebugUtilsObjectNameInfoEXT debugUtilsObjectNameInfo(
+    vk::ObjectType::eImage, NON_DISPATCHABLE_HANDLE_TO_UINT64_CAST( VkImage, static_cast<vk::Image>( image ) ), "Image name" );
   device.setDebugUtilsObjectNameEXT( debugUtilsObjectNameInfo );

   /* VULKAN_KEY_END */
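
The hunk above is the one spot where the new implicit conversion cannot kick in on its own: NON_DISPATCHABLE_HANDLE_TO_UINT64_CAST needs the raw handle bits, and a macro argument gives the compiler no vk::Image-typed parameter to convert to, so the commit inserts an explicit static_cast first. A hedged illustration (assuming, as in the samples' utilities, that the macro reinterprets a native non-dispatchable handle as a uint64_t):

// Illustration only: with no typed parameter as a conversion target, the
// wrapper's implicit cast operator is never considered inside the macro
// argument, so the vk::Image is requested explicitly before its raw
// 64-bit value is taken.
vk::Image nativeImage = static_cast<vk::Image>( image );  // uses the new cast operator
uint64_t  rawHandle   = NON_DISPATCHABLE_HANDLE_TO_UINT64_CAST( VkImage, nativeImage );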
@@ -76,7 +76,7 @@ int main( int /*argc*/, char ** /*argv*/ )
                                            { vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment } } );
   vk::raii::PipelineLayout pipelineLayout( device, { {}, *descriptorSetLayout } );

-  vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( *surfaceData.surface ) ).format;
+  vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface ) ).format;
   vk::raii::RenderPass renderPass = vk::raii::su::makeRenderPass( device, colorFormat, depthBufferData.format );

   glslang::InitializeProcess();
@@ -92,7 +92,7 @@ int main( int /*argc*/, char ** /*argv*/ )

   vk::raii::DescriptorPool descriptorPool =
     vk::raii::su::makeDescriptorPool( device, { { vk::DescriptorType::eUniformBuffer, 1 }, { vk::DescriptorType::eCombinedImageSampler, 1 } } );
-  vk::raii::DescriptorSet descriptorSet = std::move( vk::raii::DescriptorSets( device, { *descriptorPool, *descriptorSetLayout } ).front() );
+  vk::raii::DescriptorSet descriptorSet = std::move( vk::raii::DescriptorSets( device, { descriptorPool, *descriptorSetLayout } ).front() );

   vk::raii::su::updateDescriptorSets(
     device, descriptorSet, { { vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, VK_WHOLE_SIZE, nullptr } }, { textureData } );
@@ -117,7 +117,7 @@ int main( int /*argc*/, char ** /*argv*/ )
   vk::raii::Semaphore imageAcquiredSemaphore( device, vk::SemaphoreCreateInfo() );
   vk::Result result;
   uint32_t imageIndex;
-  std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, *imageAcquiredSemaphore );
+  std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, imageAcquiredSemaphore );
   assert( result == vk::Result::eSuccess );
   assert( imageIndex < swapChainData.images.size() );

@@ -126,13 +126,13 @@ int main( int /*argc*/, char ** /*argv*/ )
   std::array<vk::ClearValue, 2> clearValues;
   clearValues[0].color = vk::ClearColorValue( 0.2f, 0.2f, 0.2f, 0.2f );
   clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 );
-  vk::RenderPassBeginInfo renderPassBeginInfo( *renderPass, *framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValues );
+  vk::RenderPassBeginInfo renderPassBeginInfo( renderPass, framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValues );

   commandBuffer.beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline );
-  commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, *graphicsPipeline );
+  commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline );
-  commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, *pipelineLayout, 0, { *descriptorSet }, nullptr );
+  commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, { descriptorSet }, nullptr );

-  commandBuffer.bindVertexBuffers( 0, { *vertexBufferData.buffer }, { 0 } );
+  commandBuffer.bindVertexBuffers( 0, { vertexBufferData.buffer }, { 0 } );
   commandBuffer.setViewport(
     0, vk::Viewport( 0.0f, 0.0f, static_cast<float>( surfaceData.extent.width ), static_cast<float>( surfaceData.extent.height ), 0.0f, 1.0f ) );
   commandBuffer.setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) );
@@ -147,7 +147,7 @@ int main( int /*argc*/, char ** /*argv*/ )
   vk::SubmitInfo submitInfo( *imageAcquiredSemaphore, waitDestinationStageMask, *commandBuffer );
   graphicsQueue.submit( submitInfo, *drawFence );

-  while ( vk::Result::eTimeout == device.waitForFences( { *drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
+  while ( vk::Result::eTimeout == device.waitForFences( { drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
     ;

   vk::PresentInfoKHR presentInfoKHR( nullptr, *swapChainData.swapChain, imageIndex );
@@ -65,7 +65,7 @@ int main( int /*argc*/, char ** /*argv*/ )

   vk::raii::su::DepthBufferData depthBufferData( physicalDevice, device, vk::Format::eD16Unorm, surfaceData.extent );

-  vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( *surfaceData.surface ) ).format;
+  vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface ) ).format;
   vk::raii::RenderPass renderPass = vk::raii::su::makeRenderPass( device, colorFormat, depthBufferData.format );

   glslang::InitializeProcess();
@@ -120,12 +120,14 @@ int main( int /*argc*/, char ** /*argv*/ )

   // create a DescriptorPool with vk::DescriptorType::eUniformBufferDynamic
   vk::raii::DescriptorPool descriptorPool = vk::raii::su::makeDescriptorPool( device, { { vk::DescriptorType::eUniformBufferDynamic, 1 } } );
-  vk::raii::DescriptorSet descriptorSet = std::move( vk::raii::DescriptorSets( device, { *descriptorPool, *descriptorSetLayout } ).front() );
+  vk::raii::DescriptorSet descriptorSet = std::move( vk::raii::DescriptorSets( device, { descriptorPool, *descriptorSetLayout } ).front() );

-  vk::raii::su::updateDescriptorSets( device, descriptorSet, { { vk::DescriptorType::eUniformBufferDynamic, uniformBufferData.buffer, bufferSize, nullptr } }, {} );
+  vk::raii::su::updateDescriptorSets(
+    device, descriptorSet, { { vk::DescriptorType::eUniformBufferDynamic, uniformBufferData.buffer, bufferSize, nullptr } }, {} );

   vk::raii::PipelineCache pipelineCache( device, vk::PipelineCacheCreateInfo() );
-  vk::raii::Pipeline graphicsPipeline = vk::raii::su::makeGraphicsPipeline( device,
+  vk::raii::Pipeline graphicsPipeline =
+    vk::raii::su::makeGraphicsPipeline( device,
                                        pipelineCache,
                                        vertexShaderModule,
                                        nullptr,
@@ -142,7 +144,7 @@ int main( int /*argc*/, char ** /*argv*/ )
   vk::raii::Semaphore imageAcquiredSemaphore( device, vk::SemaphoreCreateInfo() );
   vk::Result result;
   uint32_t imageIndex;
-  std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, *imageAcquiredSemaphore );
+  std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, imageAcquiredSemaphore );
   assert( result == vk::Result::eSuccess );
   assert( imageIndex < swapChainData.images.size() );

@@ -151,9 +153,9 @@ int main( int /*argc*/, char ** /*argv*/ )
   std::array<vk::ClearValue, 2> clearValues;
   clearValues[0].color = vk::ClearColorValue( 0.2f, 0.2f, 0.2f, 0.2f );
   clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 );
-  vk::RenderPassBeginInfo renderPassBeginInfo( *renderPass, *framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValues );
+  vk::RenderPassBeginInfo renderPassBeginInfo( renderPass, framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValues );
   commandBuffer.beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline );
-  commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, *graphicsPipeline );
+  commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline );

   commandBuffer.setViewport(
     0, vk::Viewport( 0.0f, 0.0f, static_cast<float>( surfaceData.extent.width ), static_cast<float>( surfaceData.extent.height ), 0.0f, 1.0f ) );
@@ -161,14 +163,14 @@ int main( int /*argc*/, char ** /*argv*/ )

   /* The first draw should use the first matrix in the buffer */
   uint32_t dynamicOffset = 0;
-  commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, *pipelineLayout, 0, { *descriptorSet }, dynamicOffset );
+  commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, { descriptorSet }, dynamicOffset );

-  commandBuffer.bindVertexBuffers( 0, { *vertexBufferData.buffer }, { 0 } );
+  commandBuffer.bindVertexBuffers( 0, { vertexBufferData.buffer }, { 0 } );
   commandBuffer.draw( 12 * 3, 1, 0, 0 );

   // the second draw should use the second matrix in the buffer;
   dynamicOffset = (uint32_t)bufferSize;
-  commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, *pipelineLayout, 0, { *descriptorSet }, dynamicOffset );
+  commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, { descriptorSet }, dynamicOffset );
   commandBuffer.draw( 12 * 3, 1, 0, 0 );

   commandBuffer.endRenderPass();
@@ -180,7 +182,7 @@ int main( int /*argc*/, char ** /*argv*/ )
   vk::SubmitInfo submitInfo( *imageAcquiredSemaphore, waitDestinationStageMask, *commandBuffer );
   graphicsQueue.submit( submitInfo, *drawFence );

-  while ( vk::Result::eTimeout == device.waitForFences( { *drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
+  while ( vk::Result::eTimeout == device.waitForFences( { drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
     ;

   vk::PresentInfoKHR presentInfoKHR( nullptr, *swapChainData.swapChain, imageIndex );
@@ -51,14 +51,14 @@ int main( int /*argc*/, char ** /*argv*/ )
   vk::raii::Fence fence( device, vk::FenceCreateInfo() );

   vk::SubmitInfo submitInfo( {}, {}, *commandBuffer );
-  graphicsQueue.submit( submitInfo, *fence );
+  graphicsQueue.submit( submitInfo, fence );

   // Make sure timeout is long enough for a simple command buffer without waiting for an event
   vk::Result result;
   int timeouts = -1;
   do
   {
-    result = device.waitForFences( { *fence }, true, vk::su::FenceTimeout );
+    result = device.waitForFences( { fence }, true, vk::su::FenceTimeout );
     timeouts++;
   } while ( result == vk::Result::eTimeout );
   assert( result == vk::Result::eSuccess );
@@ -75,16 +75,16 @@ int main( int /*argc*/, char ** /*argv*/ )
   commandPool.reset();

   commandBuffer.begin( vk::CommandBufferBeginInfo() );
-  commandBuffer.waitEvents( { *event }, vk::PipelineStageFlagBits::eHost, vk::PipelineStageFlagBits::eBottomOfPipe, nullptr, nullptr, nullptr );
+  commandBuffer.waitEvents( { event }, vk::PipelineStageFlagBits::eHost, vk::PipelineStageFlagBits::eBottomOfPipe, nullptr, nullptr, nullptr );
   commandBuffer.end();
-  device.resetFences( { *fence } );
+  device.resetFences( { fence } );

   // Note that stepping through this code in the debugger is a bad idea because the GPU can TDR waiting for the event.
   // Execute the code from vk::Queue::submit() through vk::Device::setEvent() without breakpoints
-  graphicsQueue.submit( submitInfo, *fence );
+  graphicsQueue.submit( submitInfo, fence );

   // We should timeout waiting for the fence because the GPU should be waiting on the event
-  result = device.waitForFences( { *fence }, true, vk::su::FenceTimeout );
+  result = device.waitForFences( { fence }, true, vk::su::FenceTimeout );
   if ( result != vk::Result::eTimeout )
   {
     std::cout << "Didn't get expected timeout in vk::Device::waitForFences, exiting\n";
@@ -96,11 +96,11 @@ int main( int /*argc*/, char ** /*argv*/ )
   event.set();
   do
   {
-    result = device.waitForFences( { *fence }, true, vk::su::FenceTimeout );
+    result = device.waitForFences( { fence }, true, vk::su::FenceTimeout );
   } while ( result == vk::Result::eTimeout );
   assert( result == vk::Result::eSuccess );

-  device.resetFences( { *fence } );
+  device.resetFences( { fence } );
   event.reset();

   // reset the command buffer by resetting the complete command pool
@@ -108,7 +108,7 @@ int main( int /*argc*/, char ** /*argv*/ )

   // Now set the event from the GPU and wait on the CPU
   commandBuffer.begin( vk::CommandBufferBeginInfo() );
-  commandBuffer.setEvent( *event, vk::PipelineStageFlagBits::eBottomOfPipe );
+  commandBuffer.setEvent( event, vk::PipelineStageFlagBits::eBottomOfPipe );
   commandBuffer.end();

   // Look for the event on the CPU. It should be vk::Result::eEventReset since we haven't sent the command buffer yet.
@@ -116,7 +116,7 @@ int main( int /*argc*/, char ** /*argv*/ )
   assert( result == vk::Result::eEventReset );

   // Send the command buffer and loop waiting for the event
-  graphicsQueue.submit( submitInfo, *fence );
+  graphicsQueue.submit( submitInfo, fence );

   int polls = 0;
   do
@@ -128,7 +128,7 @@ int main( int /*argc*/, char ** /*argv*/ )

   do
   {
-    result = device.waitForFences( { *fence }, true, vk::su::FenceTimeout );
+    result = device.waitForFences( { fence }, true, vk::su::FenceTimeout );
   } while ( result == vk::Result::eTimeout );
   assert( result == vk::Result::eSuccess );

@@ -77,7 +77,7 @@ int main( int /*argc*/, char ** /*argv*/ )
   glm::mat4x4 mvpcMatrix = vk::su::createModelViewProjectionClipMatrix( surfaceData.extent );
   vk::raii::su::copyToDevice( uniformBufferData.deviceMemory, mvpcMatrix );

-  vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( *surfaceData.surface ) ).format;
+  vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface ) ).format;
   vk::raii::RenderPass renderPass = vk::raii::su::makeRenderPass( device, colorFormat, depthBufferData.format );

   glslang::InitializeProcess();
@@ -116,14 +116,14 @@ int main( int /*argc*/, char ** /*argv*/ )
   vk::raii::DescriptorPool descriptorPool( device, descriptorPoolCreateInfo );

   // Populate descriptor sets
-  vk::DescriptorSetAllocateInfo descriptorSetAllocateInfo( *descriptorPool, *descriptorSetLayout );
+  vk::DescriptorSetAllocateInfo descriptorSetAllocateInfo( descriptorPool, *descriptorSetLayout );
   vk::raii::DescriptorSet descriptorSet = std::move( vk::raii::DescriptorSets( device, descriptorSetAllocateInfo ).front() );

-  vk::DescriptorBufferInfo bufferInfo( *uniformBufferData.buffer, 0, sizeof( glm::mat4x4 ) );
+  vk::DescriptorBufferInfo bufferInfo( uniformBufferData.buffer, 0, sizeof( glm::mat4x4 ) );
-  vk::DescriptorImageInfo imageInfo( *textureData.sampler, *textureData.imageData.imageView, vk::ImageLayout::eShaderReadOnlyOptimal );
+  vk::DescriptorImageInfo imageInfo( textureData.sampler, textureData.imageData.imageView, vk::ImageLayout::eShaderReadOnlyOptimal );
   std::array<vk::WriteDescriptorSet, 2> writeDescriptorSets = {
-    vk::WriteDescriptorSet( *descriptorSet, 0, 0, vk::DescriptorType::eUniformBuffer, {}, bufferInfo ),
+    vk::WriteDescriptorSet( descriptorSet, 0, 0, vk::DescriptorType::eUniformBuffer, {}, bufferInfo ),
-    vk::WriteDescriptorSet( *descriptorSet, 1, 0, vk::DescriptorType::eCombinedImageSampler, imageInfo )
+    vk::WriteDescriptorSet( descriptorSet, 1, 0, vk::DescriptorType::eCombinedImageSampler, imageInfo )
   };
   device.updateDescriptorSets( writeDescriptorSets, nullptr );

@@ -146,19 +146,19 @@ int main( int /*argc*/, char ** /*argv*/ )
   vk::raii::Semaphore imageAcquiredSemaphore( device, vk::SemaphoreCreateInfo() );
   vk::Result result;
   uint32_t imageIndex;
-  std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, *imageAcquiredSemaphore );
+  std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, imageAcquiredSemaphore );
   assert( result == vk::Result::eSuccess );
   assert( imageIndex < swapChainData.images.size() );

   std::array<vk::ClearValue, 2> clearValues;
   clearValues[0].color = vk::ClearColorValue( 0.2f, 0.2f, 0.2f, 0.2f );
   clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 );
-  vk::RenderPassBeginInfo renderPassBeginInfo( *renderPass, *framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValues );
+  vk::RenderPassBeginInfo renderPassBeginInfo( renderPass, framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValues );
   commandBuffer.beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline );
-  commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, *graphicsPipeline );
+  commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline );
-  commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, *pipelineLayout, 0, { *descriptorSet }, nullptr );
+  commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, { descriptorSet }, nullptr );

-  commandBuffer.bindVertexBuffers( 0, { *vertexBufferData.buffer }, { 0 } );
+  commandBuffer.bindVertexBuffers( 0, { vertexBufferData.buffer }, { 0 } );
   commandBuffer.setViewport(
     0, vk::Viewport( 0.0f, 0.0f, static_cast<float>( surfaceData.extent.width ), static_cast<float>( surfaceData.extent.height ), 0.0f, 1.0f ) );
   commandBuffer.setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) );
@@ -95,7 +95,7 @@ int main( int /*argc*/, char ** /*argv*/ )
   imageMemory = vk::raii::DeviceMemory( device, memoryAllocateInfo );

   // bind memory
-  image.bindMemory( *imageMemory, 0 );
+  image.bindMemory( imageMemory, 0 );

   vk::raii::Buffer textureBuffer = nullptr;
   vk::raii::DeviceMemory textureBufferMemory = nullptr;
@@ -115,7 +115,7 @@ int main( int /*argc*/, char ** /*argv*/ )
     textureBufferMemory = vk::raii::DeviceMemory( device, memoryAllocateInfo );

     // bind memory
-    textureBuffer.bindMemory( *textureBufferMemory, 0 );
+    textureBuffer.bindMemory( textureBufferMemory, 0 );
   }
   else
   {
@ -146,21 +146,21 @@ int main( int /*argc*/, char ** /*argv*/ )
|
|||||||
if ( needsStaging )
|
if ( needsStaging )
|
||||||
{
|
{
|
||||||
// Since we're going to blit to the texture image, set its layout to eTransferDstOptimal
|
// Since we're going to blit to the texture image, set its layout to eTransferDstOptimal
|
||||||
vk::raii::su::setImageLayout( commandBuffer, *image, format, vk::ImageLayout::eUndefined, vk::ImageLayout::eTransferDstOptimal );
|
vk::raii::su::setImageLayout( commandBuffer, image, format, vk::ImageLayout::eUndefined, vk::ImageLayout::eTransferDstOptimal );
|
||||||
vk::BufferImageCopy copyRegion( 0,
|
vk::BufferImageCopy copyRegion( 0,
|
||||||
surfaceData.extent.width,
|
surfaceData.extent.width,
|
||||||
surfaceData.extent.height,
|
surfaceData.extent.height,
|
||||||
vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ),
|
vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ),
|
||||||
vk::Offset3D( 0, 0, 0 ),
|
vk::Offset3D( 0, 0, 0 ),
|
||||||
vk::Extent3D( surfaceData.extent, 1 ) );
|
vk::Extent3D( surfaceData.extent, 1 ) );
|
||||||
commandBuffer.copyBufferToImage( *textureBuffer, *image, vk::ImageLayout::eTransferDstOptimal, copyRegion );
|
commandBuffer.copyBufferToImage( textureBuffer, image, vk::ImageLayout::eTransferDstOptimal, copyRegion );
|
||||||
// Set the layout for the texture image from eTransferDstOptimal to SHADER_READ_ONLY
|
// Set the layout for the texture image from eTransferDstOptimal to SHADER_READ_ONLY
|
||||||
vk::raii::su::setImageLayout( commandBuffer, *image, format, vk::ImageLayout::eTransferDstOptimal, vk::ImageLayout::eShaderReadOnlyOptimal );
|
vk::raii::su::setImageLayout( commandBuffer, image, format, vk::ImageLayout::eTransferDstOptimal, vk::ImageLayout::eShaderReadOnlyOptimal );
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
// If we can use the linear tiled image as a texture, just do it
|
// If we can use the linear tiled image as a texture, just do it
|
||||||
vk::raii::su::setImageLayout( commandBuffer, *image, format, vk::ImageLayout::ePreinitialized, vk::ImageLayout::eShaderReadOnlyOptimal );
|
vk::raii::su::setImageLayout( commandBuffer, image, format, vk::ImageLayout::ePreinitialized, vk::ImageLayout::eShaderReadOnlyOptimal );
|
||||||
}
|
}
|
||||||
|
|
||||||
commandBuffer.end();
|
commandBuffer.end();
|
||||||
@ -183,7 +183,7 @@ int main( int /*argc*/, char ** /*argv*/ )
|
|||||||
vk::BorderColor::eFloatOpaqueWhite );
|
vk::BorderColor::eFloatOpaqueWhite );
|
||||||
vk::raii::Sampler sampler( device, samplerCreateInfo );
|
vk::raii::Sampler sampler( device, samplerCreateInfo );
|
||||||
|
|
||||||
vk::ImageViewCreateInfo imageViewCreateInfo( {}, *image, vk::ImageViewType::e2D, format, {}, { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
|
vk::ImageViewCreateInfo imageViewCreateInfo( {}, image, vk::ImageViewType::e2D, format, {}, { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
|
||||||
vk::raii::ImageView imageView( device, imageViewCreateInfo );
|
vk::raii::ImageView imageView( device, imageViewCreateInfo );
|
||||||
|
|
||||||
/* VULKAN_KEY_END */
|
/* VULKAN_KEY_END */
|
||||||
|
@ -131,13 +131,13 @@ int main( int /*argc*/, char ** /*argv*/ )
|
|||||||
uint32_t memoryTypeIndex = vk::su::findMemoryType( physicalDevice.getMemoryProperties(), memoryRequirements.memoryTypeBits, {} );
|
uint32_t memoryTypeIndex = vk::su::findMemoryType( physicalDevice.getMemoryProperties(), memoryRequirements.memoryTypeBits, {} );
|
||||||
vk::MemoryAllocateInfo memoryAllocateInfo( memoryRequirements.size, memoryTypeIndex );
|
vk::MemoryAllocateInfo memoryAllocateInfo( memoryRequirements.size, memoryTypeIndex );
|
||||||
inputMemory = vk::raii::DeviceMemory( device, memoryAllocateInfo );
|
inputMemory = vk::raii::DeviceMemory( device, memoryAllocateInfo );
|
||||||
inputImage.bindMemory( *inputMemory, 0 );
|
inputImage.bindMemory( inputMemory, 0 );
|
||||||
|
|
||||||
// Set the image layout to TRANSFER_DST_OPTIMAL to be ready for clear
|
// Set the image layout to TRANSFER_DST_OPTIMAL to be ready for clear
|
||||||
commandBuffer.begin( vk::CommandBufferBeginInfo() );
|
commandBuffer.begin( vk::CommandBufferBeginInfo() );
|
||||||
vk::raii::su::setImageLayout( commandBuffer, *inputImage, swapChainData.colorFormat, vk::ImageLayout::eUndefined, vk::ImageLayout::eTransferDstOptimal );
|
vk::raii::su::setImageLayout( commandBuffer, inputImage, swapChainData.colorFormat, vk::ImageLayout::eUndefined, vk::ImageLayout::eTransferDstOptimal );
|
||||||
|
|
||||||
commandBuffer.clearColorImage( *inputImage,
|
commandBuffer.clearColorImage( inputImage,
|
||||||
vk::ImageLayout::eTransferDstOptimal,
|
vk::ImageLayout::eTransferDstOptimal,
|
||||||
{ std::array<float, 4>( { { 1.0f, 1.0f, 0.0f, 0.0f } } ) },
|
{ std::array<float, 4>( { { 1.0f, 1.0f, 0.0f, 0.0f } } ) },
|
||||||
{ { vk::ImageAspectFlagBits::eColor, 0, VK_REMAINING_MIP_LEVELS, 0, VK_REMAINING_ARRAY_LAYERS } } );
|
{ { vk::ImageAspectFlagBits::eColor, 0, VK_REMAINING_MIP_LEVELS, 0, VK_REMAINING_ARRAY_LAYERS } } );
|
||||||
@ -146,7 +146,7 @@ int main( int /*argc*/, char ** /*argv*/ )
|
|||||||
// RenderPassCreateInfo below
|
// RenderPassCreateInfo below
|
||||||
|
|
||||||
vk::ImageViewCreateInfo imageViewCreateInfo(
|
vk::ImageViewCreateInfo imageViewCreateInfo(
|
||||||
{}, *inputImage, vk::ImageViewType::e2D, swapChainData.colorFormat, {}, { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
|
{}, inputImage, vk::ImageViewType::e2D, swapChainData.colorFormat, {}, { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
|
||||||
vk::raii::ImageView inputAttachmentView( device, imageViewCreateInfo );
|
vk::raii::ImageView inputAttachmentView( device, imageViewCreateInfo );
|
||||||
|
|
||||||
vk::DescriptorSetLayoutBinding layoutBinding( 0, vk::DescriptorType::eInputAttachment, 1, vk::ShaderStageFlagBits::eFragment );
|
vk::DescriptorSetLayoutBinding layoutBinding( 0, vk::DescriptorType::eInputAttachment, 1, vk::ShaderStageFlagBits::eFragment );
|
||||||
@ -208,11 +208,11 @@ int main( int /*argc*/, char ** /*argv*/ )
|
|||||||
vk::DescriptorPoolCreateInfo descriptorPoolCreateInfo( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 1, poolSize );
|
vk::DescriptorPoolCreateInfo descriptorPoolCreateInfo( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 1, poolSize );
|
||||||
vk::raii::DescriptorPool descriptorPool( device, descriptorPoolCreateInfo );
|
vk::raii::DescriptorPool descriptorPool( device, descriptorPoolCreateInfo );
|
||||||
|
|
||||||
vk::DescriptorSetAllocateInfo descriptorSetAllocateInfo( *descriptorPool, *descriptorSetLayout );
|
vk::DescriptorSetAllocateInfo descriptorSetAllocateInfo( descriptorPool, *descriptorSetLayout );
|
||||||
vk::raii::DescriptorSet descriptorSet = std::move( vk::raii::DescriptorSets( device, descriptorSetAllocateInfo ).front() );
|
vk::raii::DescriptorSet descriptorSet = std::move( vk::raii::DescriptorSets( device, descriptorSetAllocateInfo ).front() );
|
||||||
|
|
||||||
vk::DescriptorImageInfo inputImageInfo( nullptr, *inputAttachmentView, vk::ImageLayout::eShaderReadOnlyOptimal );
|
vk::DescriptorImageInfo inputImageInfo( nullptr, inputAttachmentView, vk::ImageLayout::eShaderReadOnlyOptimal );
|
||||||
vk::WriteDescriptorSet writeDescriptorSet( *descriptorSet, 0, 0, vk::DescriptorType::eInputAttachment, inputImageInfo );
|
vk::WriteDescriptorSet writeDescriptorSet( descriptorSet, 0, 0, vk::DescriptorType::eInputAttachment, inputImageInfo );
|
||||||
device.updateDescriptorSets( writeDescriptorSet, nullptr );
|
device.updateDescriptorSets( writeDescriptorSet, nullptr );
|
||||||
|
|
||||||
vk::raii::PipelineCache pipelineCache( device, vk::PipelineCacheCreateInfo() );
|
vk::raii::PipelineCache pipelineCache( device, vk::PipelineCacheCreateInfo() );
|
||||||
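Note the mixed form in the last hunk: the pool argument loses its '*', while *descriptorSetLayout keeps it. The pool is held in the struct as a plain vk::DescriptorPool by value, so the implicit conversion applies; the set layouts go through an array parameter that stores a pointer to its elements, where an implicitly converted temporary would dangle, so the explicit dereference (which yields a reference to the handle stored inside the raii object) stays. A condensed sketch of the two cases (reasoning inferred from the usage in this diff, not stated by the commit):

    vk::DescriptorSetAllocateInfo allocateInfo( descriptorPool,          // by-value handle: implicit conversion is safe
                                                *descriptorSetLayout );  // layout array held by pointer: keep '*'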
@@ -222,16 +222,16 @@ int main( int /*argc*/, char ** /*argv*/ )
 vk::raii::Semaphore imageAcquiredSemaphore( device, vk::SemaphoreCreateInfo() );
 vk::Result result;
 uint32_t imageIndex;
-std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, *imageAcquiredSemaphore );
+std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, imageAcquiredSemaphore );
 assert( result == vk::Result::eSuccess );
 assert( imageIndex < swapChainData.images.size() );

 vk::ClearValue clearValue;
 clearValue.color = vk::ClearColorValue( 0.2f, 0.2f, 0.2f, 0.2f );
-vk::RenderPassBeginInfo renderPassBeginInfo( *renderPass, *framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValue );
+vk::RenderPassBeginInfo renderPassBeginInfo( renderPass, framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValue );
 commandBuffer.beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline );
-commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, *graphicsPipeline );
-commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, *pipelineLayout, 0, { *descriptorSet }, nullptr );
+commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline );
+commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, { descriptorSet }, nullptr );

 commandBuffer.setViewport(
   0, vk::Viewport( 0.0f, 0.0f, static_cast<float>( surfaceData.extent.width ), static_cast<float>( surfaceData.extent.height ), 0.0f, 1.0f ) );
@@ -139,7 +139,7 @@ int main( int /*argc*/, char ** /*argv*/ )
 glm::mat4x4 mvpcMatrix = vk::su::createModelViewProjectionClipMatrix( surfaceData.extent );
 vk::raii::su::copyToDevice( uniformBufferData.deviceMemory, mvpcMatrix );

-vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( *surfaceData.surface ) ).format;
+vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface ) ).format;
 vk::raii::RenderPass renderPass = vk::raii::su::makeRenderPass( device, colorFormat, depthBufferData.format );

 glslang::InitializeProcess();
@@ -166,7 +166,7 @@ int main( int /*argc*/, char ** /*argv*/ )
 vk::raii::DescriptorSetLayout samplerLayout( device, descriptorSetLayoutCreateInfo );

 // Create pipeline layout with multiple descriptor sets
-std::array<vk::DescriptorSetLayout, 2> descriptorSetLayouts = { *uniformLayout, *samplerLayout };
+std::array<vk::DescriptorSetLayout, 2> descriptorSetLayouts = { uniformLayout, samplerLayout };
 vk::PipelineLayoutCreateInfo pipelineLayoutCreateInfo( {}, descriptorSetLayouts );
 vk::raii::PipelineLayout pipelineLayout( device, pipelineLayoutCreateInfo );
@@ -177,15 +177,15 @@ int main( int /*argc*/, char ** /*argv*/ )
 vk::raii::DescriptorPool descriptorPool( device, descriptorPoolCreateInfo );

 // Populate descriptor sets
-vk::DescriptorSetAllocateInfo descriptorSetAllocateInfo( *descriptorPool, descriptorSetLayouts );
+vk::DescriptorSetAllocateInfo descriptorSetAllocateInfo( descriptorPool, descriptorSetLayouts );
 vk::raii::DescriptorSets descriptorSets( device, descriptorSetAllocateInfo );

 // Populate with info about our uniform buffer
-vk::DescriptorBufferInfo uniformBufferInfo( *uniformBufferData.buffer, 0, sizeof( glm::mat4x4 ) );
-vk::DescriptorImageInfo textureImageInfo( *textureData.sampler, *textureData.imageData.imageView, vk::ImageLayout::eShaderReadOnlyOptimal );
+vk::DescriptorBufferInfo uniformBufferInfo( uniformBufferData.buffer, 0, sizeof( glm::mat4x4 ) );
+vk::DescriptorImageInfo textureImageInfo( textureData.sampler, textureData.imageData.imageView, vk::ImageLayout::eShaderReadOnlyOptimal );
 std::array<vk::WriteDescriptorSet, 2> writeDescriptorSets = {
-  { vk::WriteDescriptorSet( *descriptorSets[0], 0, 0, vk::DescriptorType::eUniformBuffer, {}, uniformBufferInfo ),
-    vk::WriteDescriptorSet( *descriptorSets[1], 0, 0, vk::DescriptorType::eCombinedImageSampler, textureImageInfo ) }
+  { vk::WriteDescriptorSet( descriptorSets[0], 0, 0, vk::DescriptorType::eUniformBuffer, {}, uniformBufferInfo ),
+    vk::WriteDescriptorSet( descriptorSets[1], 0, 0, vk::DescriptorType::eCombinedImageSampler, textureImageInfo ) }
 };
 device.updateDescriptorSets( writeDescriptorSets, nullptr );
@@ -209,19 +209,19 @@ int main( int /*argc*/, char ** /*argv*/ )
 vk::raii::Semaphore imageAcquiredSemaphore( device, vk::SemaphoreCreateInfo() );
 vk::Result result;
 uint32_t imageIndex;
-std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, *imageAcquiredSemaphore );
+std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, imageAcquiredSemaphore );
 assert( result == vk::Result::eSuccess );
 assert( imageIndex < swapChainData.images.size() );

 std::array<vk::ClearValue, 2> clearValues;
 clearValues[0].color = vk::ClearColorValue( 0.2f, 0.2f, 0.2f, 0.2f );
 clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 );
-vk::RenderPassBeginInfo renderPassBeginInfo( *renderPass, *framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValues );
+vk::RenderPassBeginInfo renderPassBeginInfo( renderPass, framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValues );
 commandBuffer.beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline );
-commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, *graphicsPipeline );
-commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, *pipelineLayout, 0, { *descriptorSets[0], *descriptorSets[1] }, nullptr );
+commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline );
+commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, { descriptorSets[0], descriptorSets[1] }, nullptr );

-vk::Buffer buffer = *vertexBufferData.buffer;
+vk::Buffer buffer = vertexBufferData.buffer;
 commandBuffer.bindVertexBuffers( 0, buffer, { 0 } );
 commandBuffer.setViewport(
   0, vk::Viewport( 0.0f, 0.0f, static_cast<float>( surfaceData.extent.width ), static_cast<float>( surfaceData.extent.height ), 0.0f, 1.0f ) );
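The new line vk::Buffer buffer = vertexBufferData.buffer; shows the conversion in plain initialization: a non-owning vk::Buffer can now be copy-initialized directly from the raii wrapper. A short sketch, assuming an existing device and bufferCreateInfo:

    vk::raii::Buffer raiiBuffer( device, bufferCreateInfo );  // owning wrapper
    vk::Buffer viaDeref      = *raiiBuffer;                   // explicit dereference: still works
    vk::Buffer viaConversion = raiiBuffer;                    // implicit conversion: new with this commit

Both forms yield the same non-owning handle; the lifetime is still managed by the raii object.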
@@ -237,7 +237,7 @@ int main( int /*argc*/, char ** /*argv*/ )
 vk::SubmitInfo submitInfo( *imageAcquiredSemaphore, waitDestinationStageMask, *commandBuffer );
 graphicsQueue.submit( submitInfo, *drawFence );

-while ( vk::Result::eTimeout == device.waitForFences( { *drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
+while ( vk::Result::eTimeout == device.waitForFences( { drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
   ;

 vk::PresentInfoKHR presentInfoKHR( nullptr, *swapChainData.swapChain, imageIndex );
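The submission path shows where the dereference survives: vk::SubmitInfo and vk::PresentInfoKHR keep *imageAcquiredSemaphore, *commandBuffer and *swapChainData.swapChain, while waitForFences drops the '*'. The plausible dividing line (inferred from the diff, not spelled out by the commit): function parameters are consumed during the call, so a temporary produced by the implicit conversion is harmless, whereas these create-info structs hold pointers to their handle arrays and must be fed lvalues, which only the explicit dereference provides. In sketch form:

    // consumed immediately: a temporary from the implicit conversion is fine
    while ( vk::Result::eTimeout == device.waitForFences( { drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
      ;

    // stores pointers to its arguments: keep the explicit dereference
    vk::SubmitInfo submitInfo( *imageAcquiredSemaphore, waitDestinationStageMask, *commandBuffer );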
@@ -76,7 +76,7 @@ int main( int /*argc*/, char ** /*argv*/ )
   vk::raii::su::makeDescriptorSetLayout( device, { { vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex } } );
 vk::raii::PipelineLayout pipelineLayout( device, { {}, *descriptorSetLayout } );

-vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( *surfaceData.surface ) ).format;
+vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface ) ).format;
 vk::raii::RenderPass renderPass = vk::raii::su::makeRenderPass( device, colorFormat, depthBufferData.format );

 glslang::InitializeProcess();
@@ -91,7 +91,7 @@ int main( int /*argc*/, char ** /*argv*/ )
 vk::raii::su::copyToDevice( vertexBufferData.deviceMemory, coloredCubeData, sizeof( coloredCubeData ) / sizeof( coloredCubeData[0] ) );

 vk::raii::DescriptorPool descriptorPool = vk::raii::su::makeDescriptorPool( device, { { vk::DescriptorType::eUniformBuffer, 1 } } );
-vk::raii::DescriptorSet descriptorSet = std::move( vk::raii::DescriptorSets( device, { *descriptorPool, *descriptorSetLayout } ).front() );
+vk::raii::DescriptorSet descriptorSet = std::move( vk::raii::DescriptorSets( device, { descriptorPool, *descriptorSetLayout } ).front() );

 vk::raii::su::updateDescriptorSets(
   device, descriptorSet, { { vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, VK_WHOLE_SIZE, nullptr } }, {} );
@@ -117,7 +117,7 @@ int main( int /*argc*/, char ** /*argv*/ )
 // Get the index of the next available swapchain image:
 vk::Result result;
 uint32_t imageIndex;
-std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, *imageAcquiredSemaphore );
+std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, imageAcquiredSemaphore );
 assert( result == vk::Result::eSuccess );
 assert( imageIndex < swapChainData.images.size() );
@@ -135,39 +135,39 @@ int main( int /*argc*/, char ** /*argv*/ )
 vk::MemoryAllocateInfo memoryAllocateInfo( memoryRequirements.size, memoryTypeIndex );
 queryResultMemory = vk::raii::DeviceMemory( device, memoryAllocateInfo );

-queryResultBuffer.bindMemory( *queryResultMemory, 0 );
+queryResultBuffer.bindMemory( queryResultMemory, 0 );

 vk::QueryPoolCreateInfo queryPoolCreateInfo( {}, vk::QueryType::eOcclusion, 2, {} );
 vk::raii::QueryPool queryPool( device, queryPoolCreateInfo );

 commandBuffer.begin( {} );
-commandBuffer.resetQueryPool( *queryPool, 0, 2 );
+commandBuffer.resetQueryPool( queryPool, 0, 2 );

 std::array<vk::ClearValue, 2> clearValues;
 clearValues[0].color = vk::ClearColorValue( 0.2f, 0.2f, 0.2f, 0.2f );
 clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 );
 commandBuffer.beginRenderPass(
-  vk::RenderPassBeginInfo( *renderPass, *framebuffers[imageIndex], vk::Rect2D( vk::Offset2D(), surfaceData.extent ), clearValues ),
+  vk::RenderPassBeginInfo( renderPass, framebuffers[imageIndex], vk::Rect2D( vk::Offset2D(), surfaceData.extent ), clearValues ),
   vk::SubpassContents::eInline );

-commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, *graphicsPipeline );
-commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, *pipelineLayout, 0, { *descriptorSet }, {} );
+commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline );
+commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, { descriptorSet }, {} );

-commandBuffer.bindVertexBuffers( 0, { *vertexBufferData.buffer }, { 0 } );
+commandBuffer.bindVertexBuffers( 0, { vertexBufferData.buffer }, { 0 } );
 commandBuffer.setViewport(
   0, vk::Viewport( 0.0f, 0.0f, static_cast<float>( surfaceData.extent.width ), static_cast<float>( surfaceData.extent.height ), 0.0f, 1.0f ) );
 commandBuffer.setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) );

-commandBuffer.beginQuery( *queryPool, 0, vk::QueryControlFlags() );
-commandBuffer.endQuery( *queryPool, 0 );
+commandBuffer.beginQuery( queryPool, 0, vk::QueryControlFlags() );
+commandBuffer.endQuery( queryPool, 0 );

-commandBuffer.beginQuery( *queryPool, 1, vk::QueryControlFlags() );
+commandBuffer.beginQuery( queryPool, 1, vk::QueryControlFlags() );
 commandBuffer.draw( 12 * 3, 1, 0, 0 );
 commandBuffer.endRenderPass();
-commandBuffer.endQuery( *queryPool, 1 );
+commandBuffer.endQuery( queryPool, 1 );

 commandBuffer.copyQueryPoolResults(
-  *queryPool, 0, 2, *queryResultBuffer, 0, sizeof( uint64_t ), vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );
+  queryPool, 0, 2, queryResultBuffer, 0, sizeof( uint64_t ), vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );
 commandBuffer.end();

 vk::raii::Fence drawFence( device, vk::FenceCreateInfo() );
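This occlusion-query sample records the canonical sequence: reset the pool, bracket work with beginQuery/endQuery, then copy both 64-bit counts into a host-visible buffer, with eWait making the copy block until the results land. A hedged sketch of the host-side readback that pairs with the unmapMemory() in the next hunk (names follow the diff; the print statements are illustrative, not taken from the sample):

    // Map the result buffer and read the two 64-bit occlusion counts.
    uint64_t * samplesPassed = static_cast<uint64_t *>( queryResultMemory.mapMemory( 0, VK_WHOLE_SIZE ) );
    std::cout << "query 0 samples passed: " << samplesPassed[0] << "\n";
    std::cout << "query 1 samples passed: " << samplesPassed[1] << "\n";
    queryResultMemory.unmapMemory();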
@@ -201,7 +201,7 @@ int main( int /*argc*/, char ** /*argv*/ )

 queryResultMemory.unmapMemory();

-while ( vk::Result::eTimeout == device.waitForFences( { *drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
+while ( vk::Result::eTimeout == device.waitForFences( { drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
   ;

 vk::PresentInfoKHR presentInfoKHR( nullptr, *swapChainData.swapChain, imageIndex );
@@ -42,6 +42,7 @@
 #endif

 typedef unsigned long long timestamp_t;

 timestamp_t getMilliseconds()
 {
 #ifdef WIN32
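getMilliseconds() is implemented per platform behind the WIN32 guard above. A portable equivalent using std::chrono (a sketch of an alternative, not what the sample itself uses):

    #include <chrono>

    typedef unsigned long long timestamp_t;

    timestamp_t getMilliseconds()
    {
      using namespace std::chrono;
      // steady_clock is monotonic, which is what frame timing wants
      return static_cast<timestamp_t>( duration_cast<milliseconds>( steady_clock::now().time_since_epoch() ).count() );
    }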
@@ -117,7 +118,7 @@ int main( int /*argc*/, char ** /*argv*/ )
   { vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment } } );
 vk::raii::PipelineLayout pipelineLayout( device, { {}, *descriptorSetLayout } );

-vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( *surfaceData.surface ) ).format;
+vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface ) ).format;
 vk::raii::RenderPass renderPass = vk::raii::su::makeRenderPass( device, colorFormat, depthBufferData.format );

 glslang::InitializeProcess();
@@ -133,7 +134,7 @@ int main( int /*argc*/, char ** /*argv*/ )

 vk::raii::DescriptorPool descriptorPool =
   vk::raii::su::makeDescriptorPool( device, { { vk::DescriptorType::eUniformBuffer, 1 }, { vk::DescriptorType::eCombinedImageSampler, 1 } } );
-vk::raii::DescriptorSet descriptorSet = std::move( vk::raii::DescriptorSets( device, { *descriptorPool, *descriptorSetLayout } ).front() );
+vk::raii::DescriptorSet descriptorSet = std::move( vk::raii::DescriptorSets( device, { descriptorPool, *descriptorSetLayout } ).front() );

 vk::raii::su::updateDescriptorSets(
   device, descriptorSet, { { vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, VK_WHOLE_SIZE, nullptr } }, { textureData } );
@@ -300,7 +301,7 @@ int main( int /*argc*/, char ** /*argv*/ )
 // Get the index of the next available swapchain image:
 vk::Result result;
 uint32_t imageIndex;
-std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, *imageAcquiredSemaphore );
+std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, imageAcquiredSemaphore );
 assert( result == vk::Result::eSuccess );
 assert( imageIndex < swapChainData.images.size() );
@@ -309,12 +310,12 @@ int main( int /*argc*/, char ** /*argv*/ )
 clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 );

 commandBuffer.beginRenderPass(
-  vk::RenderPassBeginInfo( *renderPass, *framebuffers[imageIndex], vk::Rect2D( vk::Offset2D(), surfaceData.extent ), clearValues ),
+  vk::RenderPassBeginInfo( renderPass, framebuffers[imageIndex], vk::Rect2D( vk::Offset2D(), surfaceData.extent ), clearValues ),
   vk::SubpassContents::eInline );
-commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, *graphicsPipeline );
-commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, *pipelineLayout, 0, { *descriptorSet }, {} );
+commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline );
+commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, { descriptorSet }, {} );

-commandBuffer.bindVertexBuffers( 0, { *vertexBufferData.buffer }, { 0 } );
+commandBuffer.bindVertexBuffers( 0, { vertexBufferData.buffer }, { 0 } );
 commandBuffer.setViewport(
   0, vk::Viewport( 0.0f, 0.0f, static_cast<float>( surfaceData.extent.width ), static_cast<float>( surfaceData.extent.height ), 0.0f, 1.0f ) );
 commandBuffer.setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) );
@@ -329,7 +330,7 @@ int main( int /*argc*/, char ** /*argv*/ )
 vk::SubmitInfo submitInfo( *imageAcquiredSemaphore, waitDestinationStageMask, *commandBuffer );
 graphicsQueue.submit( submitInfo, *drawFence );

-while ( vk::Result::eTimeout == device.waitForFences( { *drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
+while ( vk::Result::eTimeout == device.waitForFences( { drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
   ;

 vk::PresentInfoKHR presentInfoKHR( nullptr, *swapChainData.swapChain, imageIndex );
@@ -84,7 +84,7 @@ int main( int /*argc*/, char ** /*argv*/ )
   { vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment } } );
 vk::raii::PipelineLayout pipelineLayout( device, { {}, *descriptorSetLayout } );

-vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( *surfaceData.surface ) ).format;
+vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface ) ).format;
 vk::raii::RenderPass renderPass = vk::raii::su::makeRenderPass( device, colorFormat, depthBufferData.format );

 glslang::InitializeProcess();
@@ -100,7 +100,7 @@ int main( int /*argc*/, char ** /*argv*/ )

 vk::raii::DescriptorPool descriptorPool =
   vk::raii::su::makeDescriptorPool( device, { { vk::DescriptorType::eUniformBuffer, 1 }, { vk::DescriptorType::eCombinedImageSampler, 1 } } );
-vk::raii::DescriptorSet descriptorSet = std::move( vk::raii::DescriptorSets( device, { *descriptorPool, *descriptorSetLayout } ).front() );
+vk::raii::DescriptorSet descriptorSet = std::move( vk::raii::DescriptorSets( device, { descriptorPool, *descriptorSetLayout } ).front() );

 vk::raii::su::updateDescriptorSets(
   device, descriptorSet, { { vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, VK_WHOLE_SIZE, nullptr } }, { textureData } );
@@ -115,8 +115,8 @@ int main( int /*argc*/, char ** /*argv*/ )
 // Second pipeline has a modified fragment shader and sets the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag.

 std::array<vk::PipelineShaderStageCreateInfo, 2> pipelineShaderStageCreateInfos = {
-  vk::PipelineShaderStageCreateInfo( {}, vk::ShaderStageFlagBits::eVertex, *vertexShaderModule, "main" ),
-  vk::PipelineShaderStageCreateInfo( {}, vk::ShaderStageFlagBits::eFragment, *fragmentShaderModule, "main" )
+  vk::PipelineShaderStageCreateInfo( {}, vk::ShaderStageFlagBits::eVertex, vertexShaderModule, "main" ),
+  vk::PipelineShaderStageCreateInfo( {}, vk::ShaderStageFlagBits::eFragment, fragmentShaderModule, "main" )
 };

 vk::VertexInputBindingDescription vertexInputBindingDescription( 0, sizeof( texturedCubeData[0] ) );
@@ -166,8 +166,8 @@ int main( int /*argc*/, char ** /*argv*/ )
   &pipelineDepthStencilStateCreateInfo,
   &pipelineColorBlendStateCreateInfo,
   &pipelineDynamicStateCreateInfo,
-  *pipelineLayout,
-  *renderPass );
+  pipelineLayout,
+  renderPass );

 vk::raii::Pipeline basePipeline( device, pipelineCache, graphicsPipelineCreateInfo );
 switch ( basePipeline.getConstructorSuccessCode() )
@@ -200,9 +200,9 @@ void main()
 glslang::FinalizeProcess();

 // Modify pipeline info to reflect derivation
-pipelineShaderStageCreateInfos[1] = vk::PipelineShaderStageCreateInfo( {}, vk::ShaderStageFlagBits::eFragment, *fragmentShaderModule2, "main" );
+pipelineShaderStageCreateInfos[1] = vk::PipelineShaderStageCreateInfo( {}, vk::ShaderStageFlagBits::eFragment, fragmentShaderModule2, "main" );
 graphicsPipelineCreateInfo.flags = vk::PipelineCreateFlagBits::eDerivative;
-graphicsPipelineCreateInfo.basePipelineHandle = *basePipeline;
+graphicsPipelineCreateInfo.basePipelineHandle = basePipeline;
 graphicsPipelineCreateInfo.basePipelineIndex = -1;

 // And create the derived pipeline
@@ -223,7 +223,7 @@ void main()
 // Get the index of the next available swapchain image
 vk::Result result;
 uint32_t imageIndex;
-std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, *imageAcquiredSemaphore );
+std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, imageAcquiredSemaphore );
 assert( result == vk::Result::eSuccess );
 assert( imageIndex < swapChainData.images.size() );
@@ -232,12 +232,12 @@ void main()
 clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 );

 commandBuffer.beginRenderPass(
-  vk::RenderPassBeginInfo( *renderPass, *framebuffers[imageIndex], vk::Rect2D( vk::Offset2D(), surfaceData.extent ), clearValues ),
+  vk::RenderPassBeginInfo( renderPass, framebuffers[imageIndex], vk::Rect2D( vk::Offset2D(), surfaceData.extent ), clearValues ),
   vk::SubpassContents::eInline );
-commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, *derivedPipeline );
-commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, *pipelineLayout, 0, { *descriptorSet }, {} );
+commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, derivedPipeline );
+commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, { descriptorSet }, {} );

-commandBuffer.bindVertexBuffers( 0, { *vertexBufferData.buffer }, { 0 } );
+commandBuffer.bindVertexBuffers( 0, { vertexBufferData.buffer }, { 0 } );
 commandBuffer.setViewport(
   0, vk::Viewport( 0.0f, 0.0f, static_cast<float>( surfaceData.extent.width ), static_cast<float>( surfaceData.extent.height ), 0.0f, 1.0f ) );
 commandBuffer.setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) );
@@ -252,7 +252,7 @@ void main()
 vk::SubmitInfo submitInfo( *imageAcquiredSemaphore, waitDestinationStageMask, *commandBuffer );
 graphicsQueue.submit( submitInfo, *drawFence );

-while ( vk::Result::eTimeout == device.waitForFences( { *drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
+while ( vk::Result::eTimeout == device.waitForFences( { drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
   ;

 vk::PresentInfoKHR presentInfoKHR( nullptr, *swapChainData.swapChain, imageIndex );
@@ -115,7 +115,7 @@ int main( int /*argc*/, char ** /*argv*/ )
 glm::mat4x4 mvpcMatrix = vk::su::createModelViewProjectionClipMatrix( surfaceData.extent );
 vk::raii::su::copyToDevice( uniformBufferData.deviceMemory, mvpcMatrix );

-vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( *surfaceData.surface ) ).format;
+vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface ) ).format;
 vk::raii::RenderPass renderPass = vk::raii::su::makeRenderPass( device, colorFormat, depthBufferData.format );

 glslang::InitializeProcess();
@@ -148,12 +148,12 @@ int main( int /*argc*/, char ** /*argv*/ )
 vk::raii::DescriptorPool descriptorPool( device, descriptorPoolCreateInfo );

 // Populate descriptor sets
-vk::DescriptorSetAllocateInfo descriptorSetAllocateInfo( *descriptorPool, *descriptorSetLayout );
+vk::DescriptorSetAllocateInfo descriptorSetAllocateInfo( descriptorPool, *descriptorSetLayout );
 vk::raii::DescriptorSet descriptorSet = std::move( vk::raii::DescriptorSets( device, descriptorSetAllocateInfo ).front() );

 // Populate with info about our uniform buffer for MVP
-vk::DescriptorBufferInfo bufferInfo( *uniformBufferData.buffer, 0, sizeof( glm::mat4x4 ) );
-vk::WriteDescriptorSet writeDescriptorSet( *descriptorSet, 0, 0, vk::DescriptorType::eUniformBuffer, {}, bufferInfo );
+vk::DescriptorBufferInfo bufferInfo( uniformBufferData.buffer, 0, sizeof( glm::mat4x4 ) );
+vk::WriteDescriptorSet writeDescriptorSet( descriptorSet, 0, 0, vk::DescriptorType::eUniformBuffer, {}, bufferInfo );
 device.updateDescriptorSets( writeDescriptorSet, nullptr );

 // Create our push constant data, which matches shader expectations
@@ -162,7 +162,7 @@ int main( int /*argc*/, char ** /*argv*/ )
 // Ensure we have enough room for push constant data
 assert( ( sizeof( pushConstants ) <= physicalDevice.getProperties().limits.maxPushConstantsSize ) && "Too many push constants" );
 commandBuffer.begin( vk::CommandBufferBeginInfo() );
-commandBuffer.pushConstants<unsigned>( *pipelineLayout, vk::ShaderStageFlagBits::eFragment, 0, pushConstants );
+commandBuffer.pushConstants<unsigned>( pipelineLayout, vk::ShaderStageFlagBits::eFragment, 0, pushConstants );

 /* VULKAN_KEY_END */
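pushConstants<unsigned>() now takes the raii pipeline layout directly. For the call to be valid, the layout must have been created with a matching vk::PushConstantRange covering the fragment stage; a sketch of that setup (the range size assumes two unsigned values, which is illustrative; the sample only states that the data "matches shader expectations"):

    // Declare the push constant range when building the pipeline layout (sketch).
    vk::PushConstantRange pushConstantRange( vk::ShaderStageFlagBits::eFragment, 0, 2 * sizeof( unsigned ) );
    vk::PipelineLayoutCreateInfo pipelineLayoutCreateInfo( {}, *descriptorSetLayout, pushConstantRange );
    vk::raii::PipelineLayout pipelineLayout( device, pipelineLayoutCreateInfo );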
@@ -183,7 +183,7 @@ int main( int /*argc*/, char ** /*argv*/ )
 vk::raii::Semaphore imageAcquiredSemaphore( device, vk::SemaphoreCreateInfo() );
 vk::Result result;
 uint32_t imageIndex;
-std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, *imageAcquiredSemaphore );
+std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, imageAcquiredSemaphore );
 assert( result == vk::Result::eSuccess );
 assert( imageIndex < swapChainData.images.size() );
@@ -191,12 +191,12 @@ int main( int /*argc*/, char ** /*argv*/ )
 clearValues[0].color = vk::ClearColorValue( 0.2f, 0.2f, 0.2f, 0.2f );
 clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 );

-vk::RenderPassBeginInfo renderPassBeginInfo( *renderPass, *framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValues );
+vk::RenderPassBeginInfo renderPassBeginInfo( renderPass, framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValues );
 commandBuffer.beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline );
-commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, *graphicsPipeline );
-commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, *pipelineLayout, 0, { *descriptorSet }, nullptr );
+commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline );
+commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, { descriptorSet }, nullptr );

-commandBuffer.bindVertexBuffers( 0, { *vertexBufferData.buffer }, { 0 } );
+commandBuffer.bindVertexBuffers( 0, { vertexBufferData.buffer }, { 0 } );
 commandBuffer.setViewport(
   0, vk::Viewport( 0.0f, 0.0f, static_cast<float>( surfaceData.extent.width ), static_cast<float>( surfaceData.extent.height ), 0.0f, 1.0f ) );
 commandBuffer.setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) );
@@ -211,7 +211,7 @@ int main( int /*argc*/, char ** /*argv*/ )
 vk::SubmitInfo submitInfo( *imageAcquiredSemaphore, waitDestinationStageMask, *commandBuffer );
 graphicsQueue.submit( submitInfo, *drawFence );

-while ( vk::Result::eTimeout == device.waitForFences( { *drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
+while ( vk::Result::eTimeout == device.waitForFences( { drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
   ;

 vk::PresentInfoKHR presentInfoKHR( nullptr, *swapChainData.swapChain, imageIndex );
@@ -111,7 +111,7 @@ int main( int /*argc*/, char ** /*argv*/ )
   vk::DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR );
 vk::raii::PipelineLayout pipelineLayout( device, { {}, *descriptorSetLayout } );

-vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( *surfaceData.surface ) ).format;
+vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface ) ).format;
 vk::raii::RenderPass renderPass = vk::raii::su::makeRenderPass( device, colorFormat, depthBufferData.format );

 glslang::InitializeProcess();
@@ -143,26 +143,26 @@ int main( int /*argc*/, char ** /*argv*/ )
 vk::raii::Semaphore imageAcquiredSemaphore( device, vk::SemaphoreCreateInfo() );
 vk::Result result;
 uint32_t imageIndex;
-std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, *imageAcquiredSemaphore );
+std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, imageAcquiredSemaphore );
 assert( result == vk::Result::eSuccess );
 assert( imageIndex < swapChainData.images.size() );

 std::array<vk::ClearValue, 2> clearValues;
 clearValues[0].color = vk::ClearColorValue( 0.2f, 0.2f, 0.2f, 0.2f );
 clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 );
-vk::RenderPassBeginInfo renderPassBeginInfo( *renderPass, *framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValues );
+vk::RenderPassBeginInfo renderPassBeginInfo( renderPass, framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValues );
 commandBuffer.beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline );
-commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, *graphicsPipeline );
+commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline );

-vk::DescriptorBufferInfo bufferInfo( *uniformBufferData.buffer, 0, sizeof( glm::mat4x4 ) );
-vk::DescriptorImageInfo imageInfo( *textureData.sampler, *textureData.imageData.imageView, vk::ImageLayout::eShaderReadOnlyOptimal );
+vk::DescriptorBufferInfo bufferInfo( uniformBufferData.buffer, 0, sizeof( glm::mat4x4 ) );
+vk::DescriptorImageInfo imageInfo( textureData.sampler, textureData.imageData.imageView, vk::ImageLayout::eShaderReadOnlyOptimal );
 vk::WriteDescriptorSet writeDescriptorSets[2] = { vk::WriteDescriptorSet( {}, 0, 0, vk::DescriptorType::eUniformBuffer, {}, bufferInfo ),
                                                   vk::WriteDescriptorSet( {}, 1, 0, vk::DescriptorType::eCombinedImageSampler, imageInfo ) };

 // this call is from an extension and needs the dynamic dispatcher !!
-commandBuffer.pushDescriptorSetKHR( vk::PipelineBindPoint::eGraphics, *pipelineLayout, 0, { 2, writeDescriptorSets } );
+commandBuffer.pushDescriptorSetKHR( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, { 2, writeDescriptorSets } );

-commandBuffer.bindVertexBuffers( 0, { *vertexBufferData.buffer }, { 0 } );
+commandBuffer.bindVertexBuffers( 0, { vertexBufferData.buffer }, { 0 } );
 commandBuffer.setViewport(
   0, vk::Viewport( 0.0f, 0.0f, static_cast<float>( surfaceData.extent.width ), static_cast<float>( surfaceData.extent.height ), 0.0f, 1.0f ) );
 commandBuffer.setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) );
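pushDescriptorSetKHR writes the descriptors straight into the command buffer, which is why no descriptor pool or set allocation appears in this sample, and, as the comment says, the entry point comes from an extension and is resolved through the dynamic dispatcher. The device also has to enable VK_KHR_push_descriptor for the call to work; a sketch of that step (queue setup shortened, extension list illustrative):

    // Enable the push-descriptor extension at device creation (sketch).
    float queuePriority = 0.0f;
    std::vector<char const *> deviceExtensions = { VK_KHR_SWAPCHAIN_EXTENSION_NAME, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME };
    vk::DeviceQueueCreateInfo deviceQueueCreateInfo( {}, graphicsQueueFamilyIndex, 1, &queuePriority );
    vk::DeviceCreateInfo deviceCreateInfo( {}, deviceQueueCreateInfo, {}, deviceExtensions );
    vk::raii::Device device( physicalDevice, deviceCreateInfo );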
@ -177,7 +177,7 @@ int main( int /*argc*/, char ** /*argv*/ )
|
|||||||
vk::SubmitInfo submitInfo( *imageAcquiredSemaphore, waitDestinationStageMask, *commandBuffer );
|
vk::SubmitInfo submitInfo( *imageAcquiredSemaphore, waitDestinationStageMask, *commandBuffer );
|
||||||
graphicsQueue.submit( submitInfo, *drawFence );
|
graphicsQueue.submit( submitInfo, *drawFence );
|
||||||
|
|
||||||
while ( vk::Result::eTimeout == device.waitForFences( { *drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
|
while ( vk::Result::eTimeout == device.waitForFences( { drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
|
||||||
;
|
;
|
||||||
|
|
||||||
vk::PresentInfoKHR presentInfoKHR( nullptr, *swapChainData.swapChain, imageIndex );
|
vk::PresentInfoKHR presentInfoKHR( nullptr, *swapChainData.swapChain, imageIndex );
|
||||||
|
@ -81,7 +81,7 @@ int main( int /*argc*/, char ** /*argv*/ )
|
|||||||
{ vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment } } );
|
{ vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment } } );
|
||||||
vk::raii::PipelineLayout pipelineLayout( device, { {}, *descriptorSetLayout } );
|
vk::raii::PipelineLayout pipelineLayout( device, { {}, *descriptorSetLayout } );
|
||||||
|
|
||||||
vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( *surfaceData.surface ) ).format;
|
vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface ) ).format;
|
||||||
vk::raii::RenderPass renderPass =
|
vk::raii::RenderPass renderPass =
|
||||||
vk::raii::su::makeRenderPass( device, colorFormat, depthBufferData.format, vk::AttachmentLoadOp::eClear, vk::ImageLayout::eColorAttachmentOptimal );
|
vk::raii::su::makeRenderPass( device, colorFormat, depthBufferData.format, vk::AttachmentLoadOp::eClear, vk::ImageLayout::eColorAttachmentOptimal );
|
||||||
|
|
||||||
@ -122,8 +122,8 @@ int main( int /*argc*/, char ** /*argv*/ )
|
|||||||
vk::raii::DescriptorPool descriptorPool =
|
vk::raii::DescriptorPool descriptorPool =
|
||||||
vk::raii::su::makeDescriptorPool( device, { { vk::DescriptorType::eUniformBuffer, 2 }, { vk::DescriptorType::eCombinedImageSampler, 2 } } );
|
vk::raii::su::makeDescriptorPool( device, { { vk::DescriptorType::eUniformBuffer, 2 }, { vk::DescriptorType::eCombinedImageSampler, 2 } } );
|
||||||
|
|
||||||
std::array<vk::DescriptorSetLayout, 2> layouts = { *descriptorSetLayout, *descriptorSetLayout };
|
std::array<vk::DescriptorSetLayout, 2> layouts = { descriptorSetLayout, descriptorSetLayout };
|
||||||
vk::DescriptorSetAllocateInfo descriptorSetAllocateInfo( *descriptorPool, layouts );
|
vk::DescriptorSetAllocateInfo descriptorSetAllocateInfo( descriptorPool, layouts );
|
||||||
vk::raii::DescriptorSets descriptorSets( device, descriptorSetAllocateInfo );
|
vk::raii::DescriptorSets descriptorSets( device, descriptorSetAllocateInfo );
|
||||||
assert( descriptorSets.size() == 2 );
|
assert( descriptorSets.size() == 2 );
|
||||||
|
|
||||||
@@ -135,14 +135,14 @@ int main( int /*argc*/, char ** /*argv*/ )
 /* VULKAN_KEY_START */

 // create four secondary command buffers, for each quadrant of the screen
-vk::CommandBufferAllocateInfo commandBufferAllocateInfo( *commandPool, vk::CommandBufferLevel::eSecondary, 4 );
+vk::CommandBufferAllocateInfo commandBufferAllocateInfo( commandPool, vk::CommandBufferLevel::eSecondary, 4 );
 vk::raii::CommandBuffers secondaryCommandBuffers( device, commandBufferAllocateInfo );

 // Get the index of the next available swapchain image:
 vk::raii::Semaphore imageAcquiredSemaphore( device, vk::SemaphoreCreateInfo() );
 vk::Result result;
 uint32_t imageIndex;
-std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, *imageAcquiredSemaphore );
+std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, imageAcquiredSemaphore );
 assert( result == vk::Result::eSuccess );
 assert( imageIndex < swapChainData.images.size() );

@@ -157,7 +157,7 @@ int main( int /*argc*/, char ** /*argv*/ )
 vk::Rect2D scissor( vk::Offset2D( 0, 0 ), vk::Extent2D( surfaceData.extent ) );

 // now we record four separate command buffers, one for each quadrant of the screen
-vk::CommandBufferInheritanceInfo commandBufferInheritanceInfo( *renderPass, 0, *framebuffers[imageIndex] );
+vk::CommandBufferInheritanceInfo commandBufferInheritanceInfo( renderPass, 0, framebuffers[imageIndex] );
 vk::CommandBufferBeginInfo secondaryBeginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit | vk::CommandBufferUsageFlagBits::eRenderPassContinue,
                                                &commandBufferInheritanceInfo );

@@ -168,21 +168,21 @@ int main( int /*argc*/, char ** /*argv*/ )
 viewport.y = 25.0f + 250.0f * ( i / 2 );

 secondaryCommandBuffers[i].begin( secondaryBeginInfo );
-secondaryCommandBuffers[i].bindPipeline( vk::PipelineBindPoint::eGraphics, *graphicsPipeline );
-secondaryCommandBuffers[i].bindDescriptorSets( vk::PipelineBindPoint::eGraphics, *pipelineLayout, 0, { *descriptorSets[i == 0 || i == 3] }, nullptr );
-secondaryCommandBuffers[i].bindVertexBuffers( 0, { *vertexBufferData.buffer }, offset );
+secondaryCommandBuffers[i].bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline );
+secondaryCommandBuffers[i].bindDescriptorSets( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, { descriptorSets[i == 0 || i == 3] }, nullptr );
+secondaryCommandBuffers[i].bindVertexBuffers( 0, { vertexBufferData.buffer }, offset );
 secondaryCommandBuffers[i].setViewport( 0, viewport );
 secondaryCommandBuffers[i].setScissor( 0, scissor );
 secondaryCommandBuffers[i].draw( 12 * 3, 1, 0, 0 );
 secondaryCommandBuffers[i].end();
-executeCommandBuffers[i] = *secondaryCommandBuffers[i];
+executeCommandBuffers[i] = secondaryCommandBuffers[i];
 }

 std::array<vk::ClearValue, 2> clearValues;
 clearValues[0].color = vk::ClearColorValue( 0.2f, 0.2f, 0.2f, 0.2f );
 clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 );

-vk::RenderPassBeginInfo renderPassBeginInfo( *renderPass, *framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValues );
+vk::RenderPassBeginInfo renderPassBeginInfo( renderPass, framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValues );

 // specifying VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS means this render pass may ONLY call
 // vkCmdExecuteCommands
@@ -209,7 +209,7 @@ int main( int /*argc*/, char ** /*argv*/ )
 vk::SubmitInfo submitInfo( *imageAcquiredSemaphore, waitDestinationStageMask, *commandBuffer );
 graphicsQueue.submit( submitInfo, *drawFence );

-while ( vk::Result::eTimeout == device.waitForFences( { *drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
+while ( vk::Result::eTimeout == device.waitForFences( { drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
   ;

 result = presentQueue.presentKHR( vk::PresentInfoKHR( {}, *swapChainData.swapChain, imageIndex, {} ) );
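Note where the hunk above keeps the explicit dereference: the `vk::SubmitInfo` convenience constructor still takes `*imageAcquiredSemaphore` and `*commandBuffer`. A plausible reason is that these constructors accept `vk::ArrayProxyNoTemporaries`, which rejects the temporary handle an implicit conversion would produce, while plain `vk::ArrayProxy` parameters such as the one of `waitForFences` accept the converted handle. A sketch of the distinction (the wrapper function is illustrative):

```cpp
#include <vulkan/vulkan_raii.hpp>
#include <cstdint>

// Sketch: where the implicit raii cast applies and where '*' must stay.
void submitSketch( vk::raii::Device const &        device,
                   vk::raii::Queue const &         graphicsQueue,
                   vk::raii::CommandBuffer const & commandBuffer,
                   vk::raii::Semaphore const &     imageAcquiredSemaphore,
                   vk::raii::Fence const &         drawFence )
{
  vk::PipelineStageFlags waitDestinationStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput );

  // ArrayProxyNoTemporaries-based constructor: explicit '*' still required,
  // since the implicit conversion would hand it a temporary handle.
  vk::SubmitInfo submitInfo( *imageAcquiredSemaphore, waitDestinationStageMask, *commandBuffer );
  graphicsQueue.submit( submitInfo, *drawFence );  // the sample keeps the '*' here as well

  // ArrayProxy-based parameter: the raii handle converts implicitly.
  while ( vk::Result::eTimeout == device.waitForFences( { drawFence }, VK_TRUE, UINT64_MAX ) )
    ;
}
```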
@@ -103,7 +103,7 @@ int main( int /*argc*/, char ** /*argv*/ )
 glm::mat4x4 mvpcMatrix = vk::su::createModelViewProjectionClipMatrix( surfaceData.extent );
 vk::raii::su::copyToDevice( uniformBufferData.deviceMemory, mvpcMatrix );

-vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( *surfaceData.surface ) ).format;
+vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface ) ).format;
 vk::raii::RenderPass renderPass = vk::raii::su::makeRenderPass( device, colorFormat, depthBufferData.format );

 glslang::InitializeProcess();
@@ -167,16 +167,16 @@ int main( int /*argc*/, char ** /*argv*/ )
 vk::raii::DescriptorPool descriptorPool( device, descriptorPoolCreateInfo );

 // Populate descriptor sets
-vk::DescriptorSetAllocateInfo descriptorSetAllocateInfo( *descriptorPool, *descriptorSetLayout );
+vk::DescriptorSetAllocateInfo descriptorSetAllocateInfo( descriptorPool, *descriptorSetLayout );
 vk::raii::DescriptorSet descriptorSet = std::move( vk::raii::DescriptorSets( device, descriptorSetAllocateInfo ).front() );

-vk::DescriptorBufferInfo bufferInfo( *uniformBufferData.buffer, 0, sizeof( glm::mat4x4 ) );
-vk::DescriptorImageInfo imageInfo( *textureData.sampler, *textureData.imageData.imageView, vk::ImageLayout::eShaderReadOnlyOptimal );
-vk::DescriptorImageInfo samplerInfo( *sampler, {}, {} );
+vk::DescriptorBufferInfo bufferInfo( uniformBufferData.buffer, 0, sizeof( glm::mat4x4 ) );
+vk::DescriptorImageInfo imageInfo( textureData.sampler, textureData.imageData.imageView, vk::ImageLayout::eShaderReadOnlyOptimal );
+vk::DescriptorImageInfo samplerInfo( sampler, {}, {} );
 std::array<vk::WriteDescriptorSet, 3> descriptorWrites = { { vk::WriteDescriptorSet(
-                                                               *descriptorSet, 0, 0, vk::DescriptorType::eUniformBuffer, {}, bufferInfo ),
-                                                             vk::WriteDescriptorSet( *descriptorSet, 1, 0, vk::DescriptorType::eSampledImage, imageInfo ),
-                                                             vk::WriteDescriptorSet( *descriptorSet, 2, 0, vk::DescriptorType::eSampler, samplerInfo ) } };
+                                                               descriptorSet, 0, 0, vk::DescriptorType::eUniformBuffer, {}, bufferInfo ),
+                                                             vk::WriteDescriptorSet( descriptorSet, 1, 0, vk::DescriptorType::eSampledImage, imageInfo ),
+                                                             vk::WriteDescriptorSet( descriptorSet, 2, 0, vk::DescriptorType::eSampler, samplerInfo ) } };
 device.updateDescriptorSets( descriptorWrites, nullptr );

 /* VULKAN_KEY_END */
@@ -199,7 +199,7 @@ int main( int /*argc*/, char ** /*argv*/ )
 vk::raii::Semaphore imageAcquiredSemaphore( device, vk::SemaphoreCreateInfo() );
 vk::Result result;
 uint32_t imageIndex;
-std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, *imageAcquiredSemaphore );
+std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, imageAcquiredSemaphore );
 assert( result == vk::Result::eSuccess );
 assert( imageIndex < swapChainData.images.size() );

@@ -207,13 +207,13 @@ int main( int /*argc*/, char ** /*argv*/ )
 clearValues[0].color = vk::ClearColorValue( 0.2f, 0.2f, 0.2f, 0.2f );
 clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 );

-vk::RenderPassBeginInfo renderPassBeginInfo( *renderPass, *framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValues );
+vk::RenderPassBeginInfo renderPassBeginInfo( renderPass, framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValues );
 commandBuffer.beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline );

-commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, *graphicsPipeline );
-commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, *pipelineLayout, 0, { *descriptorSet }, nullptr );
+commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline );
+commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, { descriptorSet }, nullptr );

-commandBuffer.bindVertexBuffers( 0, { *vertexBufferData.buffer }, { 0 } );
+commandBuffer.bindVertexBuffers( 0, { vertexBufferData.buffer }, { 0 } );
 commandBuffer.setViewport(
   0, vk::Viewport( 0.0f, 0.0f, static_cast<float>( surfaceData.extent.width ), static_cast<float>( surfaceData.extent.height ), 0.0f, 1.0f ) );
 commandBuffer.setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) );
@@ -228,7 +228,7 @@ int main( int /*argc*/, char ** /*argv*/ )
 vk::SubmitInfo submitInfo( *imageAcquiredSemaphore, waitDestinationStageMask, *commandBuffer );
 graphicsQueue.submit( submitInfo, *drawFence );

-while ( vk::Result::eTimeout == device.waitForFences( { *drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
+while ( vk::Result::eTimeout == device.waitForFences( { drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
   ;

 vk::PresentInfoKHR presentInfoKHR( nullptr, *swapChainData.swapChain, imageIndex );

@@ -84,11 +84,12 @@ int main( int /*argc*/, char ** /*argv*/ )
 std::cout << "PhysicalDevice " << i << "\n";
 if ( supportsGetSurfaceCapabilities2 )
 {
-  auto surfaceCapabilities2 = physicalDevices[i]
+  auto surfaceCapabilities2 =
+    physicalDevices[i]
       .getSurfaceCapabilities2KHR<vk::SurfaceCapabilities2KHR,
                                   vk::DisplayNativeHdrSurfaceCapabilitiesAMD,
                                   vk::SharedPresentSurfaceCapabilitiesKHR,
-                                  vk::SurfaceProtectedCapabilitiesKHR>( { *surfaceData.surface } );
+                                  vk::SurfaceProtectedCapabilitiesKHR>( { static_cast<vk::SurfaceKHR>( surfaceData.surface ) } );

 vk::SurfaceCapabilitiesKHR const & surfaceCapabilities = surfaceCapabilities2.get<vk::SurfaceCapabilities2KHR>().surfaceCapabilities;
 cout( surfaceCapabilities );
@@ -121,7 +122,7 @@ int main( int /*argc*/, char ** /*argv*/ )
 }
 else
 {
-  vk::SurfaceCapabilitiesKHR surfaceCapabilities = physicalDevices[i].getSurfaceCapabilitiesKHR( *surfaceData.surface );
+  vk::SurfaceCapabilitiesKHR surfaceCapabilities = physicalDevices[i].getSurfaceCapabilitiesKHR( surfaceData.surface );
   cout( surfaceCapabilities );
 }
 }
@@ -46,7 +46,7 @@ int main( int /*argc*/, char ** /*argv*/ )
 for ( size_t i = 0; i < physicalDevices.size(); i++ )
 {
   std::cout << "PhysicalDevice " << i << "\n";
-  std::vector<vk::SurfaceFormatKHR> surfaceFormats = physicalDevices[i].getSurfaceFormatsKHR( *surfaceData.surface );
+  std::vector<vk::SurfaceFormatKHR> surfaceFormats = physicalDevices[i].getSurfaceFormatsKHR( surfaceData.surface );
   for ( size_t j = 0; j < surfaceFormats.size(); j++ )
   {
     std::cout << std::string( "\t" ) << "Format " << j << "\n";
@@ -76,7 +76,7 @@ int main( int /*argc*/, char ** /*argv*/ )
   { vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment } } );
 vk::raii::PipelineLayout pipelineLayout( device, { {}, *descriptorSetLayout } );

-vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( *surfaceData.surface ) ).format;
+vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface ) ).format;
 vk::raii::RenderPass renderPass = vk::raii::su::makeRenderPass( device, colorFormat, depthBufferData.format );

 glslang::InitializeProcess();
@@ -92,9 +92,10 @@ int main( int /*argc*/, char ** /*argv*/ )

 vk::raii::DescriptorPool descriptorPool =
   vk::raii::su::makeDescriptorPool( device, { { vk::DescriptorType::eUniformBuffer, 1 }, { vk::DescriptorType::eCombinedImageSampler, 1 } } );
-vk::raii::DescriptorSet descriptorSet = std::move( vk::raii::DescriptorSets( device, { *descriptorPool, *descriptorSetLayout } ).front() );
+vk::raii::DescriptorSet descriptorSet = std::move( vk::raii::DescriptorSets( device, { descriptorPool, *descriptorSetLayout } ).front() );

-vk::raii::su::updateDescriptorSets( device, descriptorSet, { { vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, VK_WHOLE_SIZE, nullptr } }, { textureData } );
+vk::raii::su::updateDescriptorSets(
+  device, descriptorSet, { { vk::DescriptorType::eUniformBuffer, uniformBufferData.buffer, VK_WHOLE_SIZE, nullptr } }, { textureData } );

 vk::raii::PipelineCache pipelineCache( device, vk::PipelineCacheCreateInfo() );
 vk::raii::Pipeline graphicsPipeline = vk::raii::su::makeGraphicsPipeline( device,
@@ -114,20 +115,20 @@ int main( int /*argc*/, char ** /*argv*/ )
 vk::raii::Semaphore imageAcquiredSemaphore( device, vk::SemaphoreCreateInfo() );
 vk::Result result;
 uint32_t imageIndex;
-std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, *imageAcquiredSemaphore );
+std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, imageAcquiredSemaphore );
 assert( result == vk::Result::eSuccess );
 assert( imageIndex < swapChainData.images.size() );

 std::array<vk::ClearValue, 2> clearValues;
 clearValues[0].color = vk::ClearColorValue( 0.2f, 0.2f, 0.2f, 0.2f );
 clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 );
-vk::RenderPassBeginInfo renderPassBeginInfo( *renderPass, *framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValues );
+vk::RenderPassBeginInfo renderPassBeginInfo( renderPass, framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValues );

 commandBuffer.beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline );
-commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, *graphicsPipeline );
-commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, *pipelineLayout, 0, { *descriptorSet }, nullptr );
+commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline );
+commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, { descriptorSet }, nullptr );

-commandBuffer.bindVertexBuffers( 0, { *vertexBufferData.buffer }, { 0 } );
+commandBuffer.bindVertexBuffers( 0, { vertexBufferData.buffer }, { 0 } );
 commandBuffer.setViewport(
   0, vk::Viewport( 0.0f, 0.0f, static_cast<float>( surfaceData.extent.width ), static_cast<float>( surfaceData.extent.height ), 0.0f, 1.0f ) );
 commandBuffer.setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ) );
@@ -142,7 +143,7 @@ int main( int /*argc*/, char ** /*argv*/ )
 vk::SubmitInfo submitInfo( *imageAcquiredSemaphore, waitDestinationStageMask, *commandBuffer );
 graphicsQueue.submit( submitInfo, *drawFence );

-while ( vk::Result::eTimeout == device.waitForFences( { *drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
+while ( vk::Result::eTimeout == device.waitForFences( { drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
   ;

 vk::PresentInfoKHR presentInfoKHR( nullptr, *swapChainData.swapChain, imageIndex );

@@ -104,14 +104,14 @@ int main( int /*argc*/, char ** /*argv*/ )
 vk::raii::su::BufferData texelBufferData( physicalDevice, device, sizeof( texels ), vk::BufferUsageFlagBits::eUniformTexelBuffer );
 texelBufferData.upload( texels );

-vk::BufferViewCreateInfo bufferViewCreateInfo( {}, *texelBufferData.buffer, texelFormat, 0, sizeof( texels ) );
+vk::BufferViewCreateInfo bufferViewCreateInfo( {}, texelBufferData.buffer, texelFormat, 0, sizeof( texels ) );
 vk::raii::BufferView texelBufferView( device, bufferViewCreateInfo );

 vk::raii::DescriptorSetLayout descriptorSetLayout =
   vk::raii::su::makeDescriptorSetLayout( device, { { vk::DescriptorType::eUniformTexelBuffer, 1, vk::ShaderStageFlagBits::eVertex } } );
 vk::raii::PipelineLayout pipelineLayout( device, { {}, *descriptorSetLayout } );

-vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( *surfaceData.surface ) ).format;
+vk::Format colorFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surfaceData.surface ) ).format;
 vk::raii::RenderPass renderPass = vk::raii::su::makeRenderPass( device, colorFormat, vk::Format::eUndefined );

 glslang::InitializeProcess();
@@ -123,7 +123,7 @@ int main( int /*argc*/, char ** /*argv*/ )
   vk::raii::su::makeFramebuffers( device, renderPass, swapChainData.imageViews, nullptr, surfaceData.extent );

 vk::raii::DescriptorPool descriptorPool = vk::raii::su::makeDescriptorPool( device, { { vk::DescriptorType::eUniformTexelBuffer, 1 } } );
-vk::raii::DescriptorSet descriptorSet = std::move( vk::raii::DescriptorSets( device, { *descriptorPool, *descriptorSetLayout } ).front() );
+vk::raii::DescriptorSet descriptorSet = std::move( vk::raii::DescriptorSets( device, { descriptorPool, *descriptorSetLayout } ).front() );

 vk::raii::su::updateDescriptorSets(
   device, descriptorSet, { { vk::DescriptorType::eUniformTexelBuffer, texelBufferData.buffer, VK_WHOLE_SIZE, &texelBufferView } }, {} );
@@ -138,7 +138,7 @@ int main( int /*argc*/, char ** /*argv*/ )
 vk::raii::Semaphore imageAcquiredSemaphore( device, vk::SemaphoreCreateInfo() );
 vk::Result result;
 uint32_t imageIndex;
-std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, *imageAcquiredSemaphore );
+std::tie( result, imageIndex ) = swapChainData.swapChain.acquireNextImage( vk::su::FenceTimeout, imageAcquiredSemaphore );
 assert( result == vk::Result::eSuccess );
 assert( imageIndex < swapChainData.images.size() );

@@ -146,11 +146,11 @@ int main( int /*argc*/, char ** /*argv*/ )

 vk::ClearValue clearValue;
 clearValue.color = vk::ClearColorValue( 0.2f, 0.2f, 0.2f, 0.2f );
-vk::RenderPassBeginInfo renderPassBeginInfo( *renderPass, *framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValue );
+vk::RenderPassBeginInfo renderPassBeginInfo( renderPass, framebuffers[imageIndex], vk::Rect2D( vk::Offset2D( 0, 0 ), surfaceData.extent ), clearValue );

 commandBuffer.beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline );
-commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, *graphicsPipeline );
-commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, *pipelineLayout, 0, { *descriptorSet }, nullptr );
+commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, graphicsPipeline );
+commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, { descriptorSet }, nullptr );

 commandBuffer.setViewport(
   0, vk::Viewport( 0.0f, 0.0f, static_cast<float>( surfaceData.extent.width ), static_cast<float>( surfaceData.extent.height ), 0.0f, 1.0f ) );
@@ -166,7 +166,7 @@ int main( int /*argc*/, char ** /*argv*/ )
 vk::SubmitInfo submitInfo( *imageAcquiredSemaphore, waitDestinationStageMask, *commandBuffer );
 graphicsQueue.submit( submitInfo, *drawFence );

-while ( vk::Result::eTimeout == device.waitForFences( { *drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
+while ( vk::Result::eTimeout == device.waitForFences( { drawFence }, VK_TRUE, vk::su::FenceTimeout ) )
   ;

 vk::PresentInfoKHR presentInfoKHR( nullptr, *swapChainData.swapChain, imageIndex );
@@ -186,7 +186,7 @@ namespace vk
 #endif
 {
   deviceMemory = vk::raii::su::allocateDeviceMemory( device, physicalDevice.getMemoryProperties(), buffer.getMemoryRequirements(), propertyFlags );
-  buffer.bindMemory( *deviceMemory, 0 );
+  buffer.bindMemory( deviceMemory, 0 );
 }

 BufferData( std::nullptr_t ) {}
@@ -279,8 +279,8 @@ namespace vk
   initialLayout } )
 {
   deviceMemory = vk::raii::su::allocateDeviceMemory( device, physicalDevice.getMemoryProperties(), image.getMemoryRequirements(), memoryProperties );
-  image.bindMemory( *deviceMemory, 0 );
-  imageView = vk::raii::ImageView( device, vk::ImageViewCreateInfo( {}, *image, vk::ImageViewType::e2D, format, {}, { aspectMask, 0, 1, 0, 1 } ) );
+  image.bindMemory( deviceMemory, 0 );
+  imageView = vk::raii::ImageView( device, vk::ImageViewCreateInfo( {}, image, vk::ImageViewType::e2D, format, {}, { aspectMask, 0, 1, 0, 1 } ) );
 }

 ImageData( std::nullptr_t ) {}
@@ -337,10 +337,10 @@ namespace vk
   uint32_t graphicsQueueFamilyIndex,
   uint32_t presentQueueFamilyIndex )
 {
-  vk::SurfaceFormatKHR surfaceFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( *surface ) );
+  vk::SurfaceFormatKHR surfaceFormat = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surface ) );
   colorFormat = surfaceFormat.format;

-  vk::SurfaceCapabilitiesKHR surfaceCapabilities = physicalDevice.getSurfaceCapabilitiesKHR( *surface );
+  vk::SurfaceCapabilitiesKHR surfaceCapabilities = physicalDevice.getSurfaceCapabilitiesKHR( surface );
   vk::Extent2D swapchainExtent;
   if ( surfaceCapabilities.currentExtent.width == std::numeric_limits<uint32_t>::max() )
   {
@@ -361,9 +361,9 @@ namespace vk
   : ( surfaceCapabilities.supportedCompositeAlpha & vk::CompositeAlphaFlagBitsKHR::ePostMultiplied ) ? vk::CompositeAlphaFlagBitsKHR::ePostMultiplied
   : ( surfaceCapabilities.supportedCompositeAlpha & vk::CompositeAlphaFlagBitsKHR::eInherit ) ? vk::CompositeAlphaFlagBitsKHR::eInherit
   : vk::CompositeAlphaFlagBitsKHR::eOpaque;
-  vk::PresentModeKHR presentMode = vk::su::pickPresentMode( physicalDevice.getSurfacePresentModesKHR( *surface ) );
+  vk::PresentModeKHR presentMode = vk::su::pickPresentMode( physicalDevice.getSurfacePresentModesKHR( surface ) );
   vk::SwapchainCreateInfoKHR swapChainCreateInfo( {},
-                                                  *surface,
+                                                  surface,
                                                   vk::su::clamp( 3u, surfaceCapabilities.minImageCount, surfaceCapabilities.maxImageCount ),
                                                   colorFormat,
                                                   surfaceFormat.colorSpace,
@@ -477,24 +477,23 @@ namespace vk
 if ( needsStaging )
 {
   // Since we're going to blit to the texture image, set its layout to eTransferDstOptimal
-  vk::raii::su::setImageLayout(
-    commandBuffer, *imageData.image, imageData.format, vk::ImageLayout::eUndefined, vk::ImageLayout::eTransferDstOptimal );
+  vk::raii::su::setImageLayout( commandBuffer, imageData.image, imageData.format, vk::ImageLayout::eUndefined, vk::ImageLayout::eTransferDstOptimal );
   vk::BufferImageCopy copyRegion( 0,
                                   extent.width,
                                   extent.height,
                                   vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ),
                                   vk::Offset3D( 0, 0, 0 ),
                                   vk::Extent3D( extent, 1 ) );
-  commandBuffer.copyBufferToImage( *stagingBufferData.buffer, *imageData.image, vk::ImageLayout::eTransferDstOptimal, copyRegion );
+  commandBuffer.copyBufferToImage( stagingBufferData.buffer, imageData.image, vk::ImageLayout::eTransferDstOptimal, copyRegion );
   // Set the layout for the texture image from eTransferDstOptimal to eShaderReadOnlyOptimal
   vk::raii::su::setImageLayout(
-    commandBuffer, *imageData.image, imageData.format, vk::ImageLayout::eTransferDstOptimal, vk::ImageLayout::eShaderReadOnlyOptimal );
+    commandBuffer, imageData.image, imageData.format, vk::ImageLayout::eTransferDstOptimal, vk::ImageLayout::eShaderReadOnlyOptimal );
 }
 else
 {
   // If we can use the linear tiled image as a texture, just do it
   vk::raii::su::setImageLayout(
-    commandBuffer, *imageData.image, imageData.format, vk::ImageLayout::ePreinitialized, vk::ImageLayout::eShaderReadOnlyOptimal );
+    commandBuffer, imageData.image, imageData.format, vk::ImageLayout::ePreinitialized, vk::ImageLayout::eShaderReadOnlyOptimal );
 }
 }

@@ -513,7 +512,7 @@ namespace vk
 assert( queueFamilyProperties.size() < std::numeric_limits<uint32_t>::max() );

 uint32_t graphicsQueueFamilyIndex = vk::su::findGraphicsQueueFamilyIndex( queueFamilyProperties );
-if ( physicalDevice.getSurfaceSupportKHR( graphicsQueueFamilyIndex, *surface ) )
+if ( physicalDevice.getSurfaceSupportKHR( graphicsQueueFamilyIndex, surface ) )
 {
   return std::make_pair( graphicsQueueFamilyIndex,
                          graphicsQueueFamilyIndex ); // the first graphicsQueueFamilyIndex does also support presents
@@ -524,7 +523,7 @@ namespace vk
 for ( size_t i = 0; i < queueFamilyProperties.size(); i++ )
 {
   if ( ( queueFamilyProperties[i].queueFlags & vk::QueueFlagBits::eGraphics ) &&
-       physicalDevice.getSurfaceSupportKHR( static_cast<uint32_t>( i ), *surface ) )
+       physicalDevice.getSurfaceSupportKHR( static_cast<uint32_t>( i ), surface ) )
   {
     return std::make_pair( static_cast<uint32_t>( i ), static_cast<uint32_t>( i ) );
   }
@@ -534,7 +533,7 @@ namespace vk
 // family index that supports present
 for ( size_t i = 0; i < queueFamilyProperties.size(); i++ )
 {
-  if ( physicalDevice.getSurfaceSupportKHR( static_cast<uint32_t>( i ), *surface ) )
+  if ( physicalDevice.getSurfaceSupportKHR( static_cast<uint32_t>( i ), surface ) )
   {
     return std::make_pair( graphicsQueueFamilyIndex, static_cast<uint32_t>( i ) );
   }
@@ -545,7 +544,7 @@ namespace vk

 vk::raii::CommandBuffer makeCommandBuffer( vk::raii::Device const & device, vk::raii::CommandPool const & commandPool )
 {
-  vk::CommandBufferAllocateInfo commandBufferAllocateInfo( *commandPool, vk::CommandBufferLevel::ePrimary, 1 );
+  vk::CommandBufferAllocateInfo commandBufferAllocateInfo( commandPool, vk::CommandBufferLevel::ePrimary, 1 );
   return std::move( vk::raii::CommandBuffers( device, commandBufferAllocateInfo ).front() );
 }

@@ -607,15 +606,15 @@ namespace vk
   vk::Extent2D const & extent )
 {
   vk::ImageView attachments[2];
-  attachments[1] = pDepthImageView ? **pDepthImageView : vk::ImageView();
+  attachments[1] = pDepthImageView ? *pDepthImageView : vk::ImageView();

   vk::FramebufferCreateInfo framebufferCreateInfo(
-    vk::FramebufferCreateFlags(), *renderPass, pDepthImageView ? 2 : 1, attachments, extent.width, extent.height, 1 );
+    vk::FramebufferCreateFlags(), renderPass, pDepthImageView ? 2 : 1, attachments, extent.width, extent.height, 1 );
   std::vector<vk::raii::Framebuffer> framebuffers;
   framebuffers.reserve( imageViews.size() );
   for ( auto const & imageView : imageViews )
   {
-    attachments[0] = *imageView;
+    attachments[0] = imageView;
     framebuffers.push_back( vk::raii::Framebuffer( device, framebufferCreateInfo ) );
   }
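The `**pDepthImageView` to `*pDepthImageView` change in `makeFramebuffers` above is the pointer variant of the same simplification: one dereference for the pointer remains, and the resulting `vk::raii::ImageView` now converts implicitly. A minimal sketch (the function name is illustrative):

```cpp
#include <vulkan/vulkan_raii.hpp>

// Sketch: the second '*' disappears because the raii handle itself converts.
vk::ImageView depthAttachment( vk::raii::ImageView const * pDepthImageView )
{
  // before: return pDepthImageView ? **pDepthImageView : vk::ImageView();
  return pDepthImageView ? *pDepthImageView : vk::ImageView();
}
```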
@@ -636,8 +635,8 @@ namespace vk
   vk::raii::RenderPass const & renderPass )
 {
   std::array<vk::PipelineShaderStageCreateInfo, 2> pipelineShaderStageCreateInfos = {
-    vk::PipelineShaderStageCreateInfo( {}, vk::ShaderStageFlagBits::eVertex, *vertexShaderModule, "main", vertexShaderSpecializationInfo ),
-    vk::PipelineShaderStageCreateInfo( {}, vk::ShaderStageFlagBits::eFragment, *fragmentShaderModule, "main", fragmentShaderSpecializationInfo )
+    vk::PipelineShaderStageCreateInfo( {}, vk::ShaderStageFlagBits::eVertex, vertexShaderModule, "main", vertexShaderSpecializationInfo ),
+    vk::PipelineShaderStageCreateInfo( {}, vk::ShaderStageFlagBits::eFragment, fragmentShaderModule, "main", fragmentShaderSpecializationInfo )
   };

   std::vector<vk::VertexInputAttributeDescription> vertexInputAttributeDescriptions;
@@ -705,8 +704,8 @@ namespace vk
   &pipelineDepthStencilStateCreateInfo,
   &pipelineColorBlendStateCreateInfo,
   &pipelineDynamicStateCreateInfo,
-  *pipelineLayout,
-  *renderPass );
+  pipelineLayout,
+  renderPass );

 return vk::raii::Pipeline( device, pipelineCache, graphicsPipelineCreateInfo );
 }
@@ -814,8 +813,8 @@ namespace vk
 void submitAndWait( vk::raii::Device const & device, vk::raii::Queue const & queue, vk::raii::CommandBuffer const & commandBuffer )
 {
   vk::raii::Fence fence( device, vk::FenceCreateInfo() );
-  queue.submit( vk::SubmitInfo( nullptr, nullptr, *commandBuffer ), *fence );
-  while ( vk::Result::eTimeout == device.waitForFences( { *fence }, VK_TRUE, vk::su::FenceTimeout ) )
+  queue.submit( vk::SubmitInfo( nullptr, nullptr, *commandBuffer ), fence );
+  while ( vk::Result::eTimeout == device.waitForFences( { fence }, VK_TRUE, vk::su::FenceTimeout ) )
     ;
 }

@@ -834,18 +833,18 @@ namespace vk
 uint32_t dstBinding = bindingOffset;
 for ( auto const & bd : bufferData )
 {
-  bufferInfos.emplace_back( *std::get<1>( bd ), 0, std::get<2>( bd ) );
+  bufferInfos.emplace_back( std::get<1>( bd ), 0, std::get<2>( bd ) );
   vk::BufferView bufferView;
   if ( std::get<3>( bd ) )
   {
-    bufferView = **std::get<3>( bd );
+    bufferView = *std::get<3>( bd );
   }
   writeDescriptorSets.emplace_back(
-    *descriptorSet, dstBinding++, 0, 1, std::get<0>( bd ), nullptr, &bufferInfos.back(), std::get<3>( bd ) ? &bufferView : nullptr );
+    descriptorSet, dstBinding++, 0, 1, std::get<0>( bd ), nullptr, &bufferInfos.back(), std::get<3>( bd ) ? &bufferView : nullptr );
 }

-vk::DescriptorImageInfo imageInfo( *textureData.sampler, *textureData.imageData.imageView, vk::ImageLayout::eShaderReadOnlyOptimal );
-writeDescriptorSets.emplace_back( *descriptorSet, dstBinding, 0, vk::DescriptorType::eCombinedImageSampler, imageInfo, nullptr, nullptr );
+vk::DescriptorImageInfo imageInfo( textureData.sampler, textureData.imageData.imageView, vk::ImageLayout::eShaderReadOnlyOptimal );
+writeDescriptorSets.emplace_back( descriptorSet, dstBinding, 0, vk::DescriptorType::eCombinedImageSampler, imageInfo, nullptr, nullptr );

 device.updateDescriptorSets( writeDescriptorSets, nullptr );
 }
@@ -865,14 +864,14 @@ namespace vk
 uint32_t dstBinding = bindingOffset;
 for ( auto const & bd : bufferData )
 {
-  bufferInfos.emplace_back( *std::get<1>( bd ), 0, std::get<2>( bd ) );
+  bufferInfos.emplace_back( std::get<1>( bd ), 0, std::get<2>( bd ) );
   vk::BufferView bufferView;
   if ( std::get<3>( bd ) )
   {
-    bufferView = **std::get<3>( bd );
+    bufferView = *std::get<3>( bd );
   }
   writeDescriptorSets.emplace_back(
-    *descriptorSet, dstBinding++, 0, 1, std::get<0>( bd ), nullptr, &bufferInfos.back(), std::get<3>( bd ) ? &bufferView : nullptr );
+    descriptorSet, dstBinding++, 0, 1, std::get<0>( bd ), nullptr, &bufferInfos.back(), std::get<3>( bd ) ? &bufferView : nullptr );
 }

 std::vector<vk::DescriptorImageInfo> imageInfos;
@@ -881,9 +880,9 @@ namespace vk
 imageInfos.reserve( textureData.size() );
 for ( auto const & thd : textureData )
 {
-  imageInfos.emplace_back( *thd.sampler, *thd.imageData.imageView, vk::ImageLayout::eShaderReadOnlyOptimal );
+  imageInfos.emplace_back( thd.sampler, thd.imageData.imageView, vk::ImageLayout::eShaderReadOnlyOptimal );
 }
-writeDescriptorSets.emplace_back( *descriptorSet,
+writeDescriptorSets.emplace_back( descriptorSet,
                                   dstBinding,
                                   0,
                                   vk::su::checked_cast<uint32_t>( imageInfos.size() ),
@@ -934,6 +934,10 @@ With this define, you can disable these declarations, but you will have to decla

 If both, VULKAN_HPP_NO_EXCEPTIONS and VULKAN_HPP_EXPECTED are defined, the vk::raii-classes don't throw exceptions. That is, the actual constructors are not available, but the creation-functions must be used. For more details have a look at the vk_raii_ProgrammingGuide.md.

+#### VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST
+
+Even though the ```vk::UniqueHandles``` and the ```vk::SharedHandles``` are semantically close to pointers, an implicit cast operator to the underlying ```vk::Handle``` might be handy. You can add that implicit cast operator by defining ```VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST```.
+
 #### VULKAN_HPP_STORAGE_API

 With this define you can specify whether the ```DispatchLoaderDynamic``` is imported or exported (see ```VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE```). If ```VULKAN_HPP_STORAGE_API``` is not defined externally, and ```VULKAN_HPP_STORAGE_SHARED``` is defined, depending on the ```VULKAN_HPP_STORAGE_SHARED_EXPORT``` being defined, ```VULKAN_HPP_STORAGE_API``` is either set to ```__declspec( dllexport )``` (for MSVC) / ```__attribute__( ( visibility( "default" ) ) )``` (for gcc or clang) or ```__declspec( dllimport )``` (for MSVC), respectively. For other compilers, you might specify the corresponding storage by defining ```VULKAN_HPP_STORAGE_API``` on your own.
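A minimal usage sketch for the new README define; `device` is assumed valid, `bufferCreateInfo` is an assumed, filled-in create-info, and the function name is illustrative:

```cpp
#define VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST
#include <vulkan/vulkan.hpp>

// Sketch: with the define, a vk::UniqueHandle converts implicitly to its
// underlying handle, so .get() / operator* can often be dropped.
void uniqueSketch( vk::Device device, vk::BufferCreateInfo const & bufferCreateInfo )
{
  vk::UniqueBuffer uniqueBuffer = device.createBufferUnique( bufferCreateInfo );

  vk::Buffer viaGet  = uniqueBuffer.get();  // always available
  vk::Buffer viaCast = uniqueBuffer;        // only with VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST
}
```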
@@ -8404,6 +8404,11 @@ ${moveAssignmentInstructions}
       return m_${handleName};
     }

+    operator VULKAN_HPP_NAMESPACE::${handleType}() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_${handleName};
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       ${clearMembers}
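Instantiated for a concrete handle type, the generator template addition above would expand to roughly the following. This is an illustrative sketch for a fence, not the literal generator output:

```cpp
#include <vulkan/vulkan.hpp>

// Rough expansion for vk::raii::Fence (illustrative): the handle keeps its
// existing operator* and additionally gains an implicit cast operator.
class Fence
{
public:
  vk::Fence operator*() const noexcept
  {
    return m_fence;
  }

  operator vk::Fence() const noexcept  // the newly generated implicit cast
  {
    return m_fence;
  }

private:
  vk::Fence m_fence;
};
```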
@ -9016,7 +9021,7 @@ std::string
|
|||||||
{
|
{
|
||||||
if ( destructorParam.type.type == "Vk" + parentType )
|
if ( destructorParam.type.type == "Vk" + parentType )
|
||||||
{
|
{
|
||||||
initializationList += "m_" + parentName + "( *" + parentName + " ), ";
|
initializationList += "m_" + parentName + "( " + parentName + " ), ";
|
||||||
}
|
}
|
||||||
else if ( destructorParam.type.type == handle.first )
|
else if ( destructorParam.type.type == handle.first )
|
||||||
{
|
{
|
||||||
|
@ -274,7 +274,7 @@ inline std::vector<tinyxml2::XMLElement const *> getChildElements( ElementContai
|
|||||||
return childElements;
|
return childElements;
|
||||||
}
|
}
|
||||||
|
|
||||||
inline bool isHexNumber(std::string const& name)
|
inline bool isHexNumber( std::string const & name )
|
||||||
{
|
{
|
||||||
return name.starts_with( "0x" ) && ( name.find_first_not_of( "0123456789ABCDEF", 2 ) == std::string::npos );
|
return name.starts_with( "0x" ) && ( name.find_first_not_of( "0123456789ABCDEF", 2 ) == std::string::npos );
|
||||||
}
|
}
|
||||||
@@ -15,6 +15,8 @@
 // VulkanHpp Samples : SharedHandles
 // Draw a textured cube using shared handles for resource management and correct order of destruction

+#define VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST
+
 #include "../utils/geometries.hpp"
 #include "../utils/math.hpp"
 #include "../utils/shaders.hpp"
@@ -34,9 +36,16 @@ std::vector<vk::SharedFramebuffer> makeSharedFramebuffers( const vk::SharedDevic
   const vk::SharedImageView & depthImageView,
   const vk::Extent2D & extent )
 {
+  // show the simplified usage with VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST defined
+#if defined( VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST )
+  auto renderPassHandle = renderPass.get(); // lvalue reference is required for the capture below
+  std::vector<vk::SharedFramebuffer> sharedFramebuffers;
+  std::vector<vk::Framebuffer> framebuffers = vk::su::createFramebuffers( device, renderPassHandle, imageViews, depthImageView, extent );
+#else
   auto renderPassHandle = renderPass.get(); // lvalue reference is required for the capture below
   std::vector<vk::SharedFramebuffer> sharedFramebuffers;
   std::vector<vk::Framebuffer> framebuffers = vk::su::createFramebuffers( device.get(), renderPassHandle, imageViews, depthImageView.get(), extent );
+#endif
   sharedFramebuffers.reserve( framebuffers.size() );
   for ( auto & framebuffer : framebuffers )
   {
@@ -115,7 +124,6 @@ public:
   device };
 graphicsQueue = vk::SharedQueue{ device->getQueue( graphicsAndPresentQueueFamilyIndex.first, 0 ), device };

-
 presentQueue = vk::SharedQueue{ device->getQueue( graphicsAndPresentQueueFamilyIndex.second, 0 ), device };

 depthFormat = vk::Format::eD16Unorm;
@@ -354,13 +354,13 @@ namespace vk
   void * /*pUserData*/ )
 {
 #if !defined( NDEBUG )
-  if ( static_cast<uint32_t>(pCallbackData->messageIdNumber) == 0x822806fa )
+  if ( static_cast<uint32_t>( pCallbackData->messageIdNumber ) == 0x822806fa )
   {
     // Validation Warning: vkCreateInstance(): to enable extension VK_EXT_debug_utils, but this extension is intended to support use by applications when
     // debugging and it is strongly recommended that it be otherwise avoided.
     return vk::False;
   }
-  else if ( static_cast<uint32_t>(pCallbackData->messageIdNumber) == 0xe8d1a9fe )
+  else if ( static_cast<uint32_t>( pCallbackData->messageIdNumber ) == 0xe8d1a9fe )
   {
     // Validation Performance Warning: Using debug builds of the validation layers *will* adversely affect performance.
     return vk::False;
@@ -69,7 +69,10 @@ class ReferenceCounter
 {
 public:
   template <typename... Args>
-  ReferenceCounter( Args &&... control_args ) : m_header( std::forward<Args>( control_args )... ){}
+  ReferenceCounter( Args &&... control_args ) : m_header( std::forward<Args>( control_args )... )
+  {
+  }

   ReferenceCounter( const ReferenceCounter & ) = delete;
   ReferenceCounter & operator=( const ReferenceCounter & ) = delete;

@@ -163,6 +166,13 @@ public:
   return bool( m_handle );
 }

+#if defined( VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST )
+  operator HandleType() const VULKAN_HPP_NOEXCEPT
+  {
+    return m_handle;
+  }
+#endif
+
 const HandleType * operator->() const VULKAN_HPP_NOEXCEPT
 {
   return &m_handle;
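A sketch of what the guarded `SharedHandle` operator above enables. The construction of a `vk::SharedBuffer` from an existing handle and its owning `vk::SharedDevice` is an assumption about the SharedHandle API, and the function name is illustrative:

```cpp
#define VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST
#include <vulkan/vulkan_shared.hpp>

// Sketch: with the define, a vk::SharedHandle converts implicitly to its
// underlying handle type instead of requiring .get().
void sharedSketch( vk::SharedDevice device, vk::Buffer buffer )
{
  vk::SharedBuffer sharedBuffer( buffer, device );  // shared ownership (assumed ctor shape)

  vk::Buffer viaGet  = sharedBuffer.get();  // always available
  vk::Buffer viaCast = sharedBuffer;        // only with the define
}
```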
@@ -1,32 +1,31 @@
 #if !defined( VULKAN_HPP_NO_SMART_HANDLE )
 template <typename Type, typename Dispatch>
 class UniqueHandleTraits;

 template <typename Type, typename Dispatch>
 class UniqueHandle : public UniqueHandleTraits<Type, Dispatch>::deleter
 {
 private:
   using Deleter = typename UniqueHandleTraits<Type, Dispatch>::deleter;

 public:
   using element_type = Type;

-  UniqueHandle()
-    : Deleter()
-    , m_value()
-  {}
+  UniqueHandle() : Deleter(), m_value() {}

   explicit UniqueHandle( Type const & value, Deleter const & deleter = Deleter() ) VULKAN_HPP_NOEXCEPT
     : Deleter( deleter )
     , m_value( value )
-  {}
+  {
+  }

   UniqueHandle( UniqueHandle const & ) = delete;

   UniqueHandle( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT
     : Deleter( std::move( static_cast<Deleter &>( other ) ) )
     , m_value( other.release() )
-  {}
+  {
+  }

   ~UniqueHandle() VULKAN_HPP_NOEXCEPT
   {
@@ -50,6 +49,13 @@
   return m_value.operator bool();
 }

+# if defined( VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST )
+  operator Type() const VULKAN_HPP_NOEXCEPT
+  {
+    return m_value;
+  }
+# endif
+
 Type const * operator->() const VULKAN_HPP_NOEXCEPT
 {
   return &m_value;
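One design note on the guarded unique-handle operator above: the implicit conversion keeps call sites terse, but it also makes it easy to keep a non-owning raw copy alive past the owner. A hedged sketch of both sides, assuming the define is set and `poolCreateInfo` is a valid create-info:

```cpp
#define VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST
#include <vulkan/vulkan.hpp>

void ownershipSketch( vk::Device device, vk::CommandPoolCreateInfo const & poolCreateInfo )
{
  vk::UniqueCommandPool pool = device.createCommandPoolUnique( poolCreateInfo );

  // convenient: the unique handle converts where the raw handle is expected
  vk::CommandBufferAllocateInfo allocateInfo( pool, vk::CommandBufferLevel::ePrimary, 1 );

  // caution: 'raw' does not share ownership and dangles once 'pool' is destroyed
  vk::CommandPool raw = pool;
}
```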
@ -105,23 +111,21 @@
|
|||||||
std::swap( static_cast<Deleter &>( *this ), static_cast<Deleter &>( rhs ) );
|
std::swap( static_cast<Deleter &>( *this ), static_cast<Deleter &>( rhs ) );
|
||||||
}
|
}
|
||||||
|
|
||||||
private:
|
private:
|
||||||
Type m_value;
|
Type m_value;
|
||||||
};
|
};
|
||||||
|
|
||||||
template <typename UniqueType>
|
template <typename UniqueType>
|
||||||
VULKAN_HPP_INLINE std::vector<typename UniqueType::element_type>
|
VULKAN_HPP_INLINE std::vector<typename UniqueType::element_type> uniqueToRaw( std::vector<UniqueType> const & handles )
|
||||||
uniqueToRaw( std::vector<UniqueType> const & handles )
|
{
|
||||||
{
|
|
||||||
std::vector<typename UniqueType::element_type> newBuffer( handles.size() );
|
std::vector<typename UniqueType::element_type> newBuffer( handles.size() );
|
||||||
std::transform( handles.begin(), handles.end(), newBuffer.begin(), []( UniqueType const & handle ) { return handle.get(); } );
|
std::transform( handles.begin(), handles.end(), newBuffer.begin(), []( UniqueType const & handle ) { return handle.get(); } );
|
||||||
return newBuffer;
|
return newBuffer;
|
||||||
}
|
}
|
||||||
|
|
||||||
template <typename Type, typename Dispatch>
|
template <typename Type, typename Dispatch>
|
||||||
VULKAN_HPP_INLINE void swap( UniqueHandle<Type, Dispatch> & lhs,
|
VULKAN_HPP_INLINE void swap( UniqueHandle<Type, Dispatch> & lhs, UniqueHandle<Type, Dispatch> & rhs ) VULKAN_HPP_NOEXCEPT
|
||||||
UniqueHandle<Type, Dispatch> & rhs ) VULKAN_HPP_NOEXCEPT
|
{
|
||||||
{
|
|
||||||
lhs.swap( rhs );
|
lhs.swap( rhs );
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
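Note: the guarded operator Type() above is what lets a vk::UniqueHandle stand in for its underlying handle type at a call site. A minimal before/after sketch, assuming an already created vk::UniqueCommandPool named commandPool (the name is illustrative):

    // without VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST: unwrap explicitly
    vk::CommandBufferAllocateInfo allocateInfo( *commandPool, vk::CommandBufferLevel::ePrimary, 1 );

    // with the macro defined before including vulkan.hpp: the implicit cast applies
    vk::CommandBufferAllocateInfo allocateInfo( commandPool, vk::CommandBufferLevel::ePrimary, 1 );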
@ -23,7 +23,7 @@

 // only if VULKAN_HPP_RAII_NO_EXCEPTIONS really is defined, this test is meaningfull and needs to compile and run.
 #if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
-#include <vulkan/vulkan_raii.hpp>
+#  include <vulkan/vulkan_raii.hpp>

 static char const * AppName    = "NoExceptions";
 static char const * EngineName = "Vulkan.hpp";
@ -60,12 +60,11 @@ int main( int /*argc*/, char ** /*argv*/ )
   assert( device.has_value() );

   // create a CommandPool to allocate a CommandBuffer from
-  auto commandPool =
-    device->createCommandPool( vk::CommandPoolCreateInfo( vk::CommandPoolCreateFlags(), deviceQueueCreateInfo.queueFamilyIndex ) );
+  auto commandPool = device->createCommandPool( vk::CommandPoolCreateInfo( vk::CommandPoolCreateFlags(), deviceQueueCreateInfo.queueFamilyIndex ) );
   assert( commandPool.has_value() );

   // allocate a CommandBuffer from the CommandPool
-  auto commandBuffers = device->allocateCommandBuffers( vk::CommandBufferAllocateInfo( **commandPool, vk::CommandBufferLevel::ePrimary, 1 ) );
+  auto commandBuffers = device->allocateCommandBuffers( vk::CommandBufferAllocateInfo( *commandPool, vk::CommandBufferLevel::ePrimary, 1 ) );
   assert( commandBuffers.has_value() );

   auto commandBuffer = std::move( commandBuffers->front() );
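Note: under VULKAN_HPP_RAII_NO_EXCEPTIONS the raii creation functions return a std::expected-like value instead of throwing, which is why this test asserts has_value() after every step. A sketch of the checked pattern (assuming the same device and queueFamilyIndex as above; the error accessor is assumed to follow std::expected):

    auto commandPool = device->createCommandPool( vk::CommandPoolCreateInfo( vk::CommandPoolCreateFlags(), queueFamilyIndex ) );
    if ( !commandPool.has_value() )
    {
      // commandPool.error() carries the failing vk::Result
      return EXIT_FAILURE;
    }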
@ -14,6 +14,8 @@
 //
 // VulkanHpp Test: Compile test for Unique handles

+#define VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST
+
 #include "../../samples/utils/geometries.hpp"
 #include "../../samples/utils/shaders.hpp"
 #include "../../samples/utils/utils.hpp"
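Note: the define has to appear before any header that pulls in vulkan.hpp (here the samples' utils headers do), because the cast operators are compiled in only when the guard is visible at preprocessing time:

    #define VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST  // must precede the include
    #include <vulkan/vulkan.hpp>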
@ -42,7 +44,7 @@ public:
   };
 };

-vk::UniqueDescriptorSetLayout createDescriptorSetLayoutUnique( vk::Device const & device,
+vk::UniqueDescriptorSetLayout createDescriptorSetLayoutUnique( vk::UniqueDevice const & device,
                                                                std::vector<std::tuple<vk::DescriptorType, uint32_t, vk::ShaderStageFlags>> const & bindingData,
                                                                vk::DescriptorSetLayoutCreateFlags flags = {} )
 {
@ -52,7 +54,7 @@ vk::UniqueDescriptorSetLayout createDescriptorSetLayoutUnique( vk::Device const
     bindings[i] = vk::DescriptorSetLayoutBinding(
       vk::su::checked_cast<uint32_t>( i ), std::get<0>( bindingData[i] ), std::get<1>( bindingData[i] ), std::get<2>( bindingData[i] ) );
   }
-  return device.createDescriptorSetLayoutUnique( vk::DescriptorSetLayoutCreateInfo( flags, bindings ) );
+  return device->createDescriptorSetLayoutUnique( vk::DescriptorSetLayoutCreateInfo( flags, bindings ) );
 }

 vk::UniqueInstance createInstanceUnique( std::string const & appName,
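Note: with the parameter switched to vk::UniqueDevice, member calls go through operator->, which exposes the wrapped vk::Device; hence device.createDescriptorSetLayoutUnique becomes device->createDescriptorSetLayoutUnique. The same pattern in isolation (deviceCreateInfo assumed to exist):

    vk::UniqueDevice device = physicalDevice.createDeviceUnique( deviceCreateInfo );
    device->waitIdle();  // vk::Device::waitIdle() reached via UniqueHandle::operator->()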
@ -90,7 +92,32 @@ vk::UniqueInstance createInstanceUnique( std::string const & appNam
   return instance;
 }

-vk::UniqueRenderPass createRenderPassUnique( vk::Device const & device,
+vk::UniqueDevice createDeviceUnique( vk::PhysicalDevice const &         physicalDevice,
+                                     uint32_t                           queueFamilyIndex,
+                                     std::vector<std::string> const &   extensions,
+                                     vk::PhysicalDeviceFeatures const * physicalDeviceFeatures = nullptr,
+                                     void const *                       pNext                  = nullptr )
+{
+  std::vector<char const *> enabledExtensions;
+  enabledExtensions.reserve( extensions.size() );
+  for ( auto const & ext : extensions )
+  {
+    enabledExtensions.push_back( ext.data() );
+  }
+
+  float                     queuePriority = 0.0f;
+  vk::DeviceQueueCreateInfo deviceQueueCreateInfo( {}, queueFamilyIndex, 1, &queuePriority );
+  vk::DeviceCreateInfo      deviceCreateInfo( {}, deviceQueueCreateInfo, {}, enabledExtensions, physicalDeviceFeatures, pNext );
+
+  vk::UniqueDevice device = physicalDevice.createDeviceUnique( deviceCreateInfo );
+#if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+  // initialize function pointers for instance
+  VULKAN_HPP_DEFAULT_DISPATCHER.init( *device );
+#endif
+  return device;
+}
+
+vk::UniqueRenderPass createRenderPassUnique( vk::UniqueDevice const & device,
                                              vk::Format               colorFormat,
                                              vk::Format               depthFormat,
                                              vk::AttachmentLoadOp     loadOp = vk::AttachmentLoadOp::eClear,
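Note: the createDeviceUnique helper added above is exercised by main() further down; an isolated call would look like this (physicalDevice and graphicsQueueFamilyIndex as established earlier in the test):

    vk::UniqueDevice device = createDeviceUnique( physicalDevice, graphicsQueueFamilyIndex, vk::su::getDeviceExtensions() );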
@ -127,10 +154,10 @@ vk::UniqueRenderPass createRenderPassUnique( vk::Device const & device,
                                              colorAttachment,
                                              {},
                                              ( depthFormat != vk::Format::eUndefined ) ? &depthAttachment : nullptr );
-  return device.createRenderPassUnique( vk::RenderPassCreateInfo( vk::RenderPassCreateFlags(), attachmentDescriptions, subpassDescription ) );
+  return device->createRenderPassUnique( vk::RenderPassCreateInfo( vk::RenderPassCreateFlags(), attachmentDescriptions, subpassDescription ) );
 }

-vk::UniqueShaderModule createShaderModuleUnique( vk::Device const & device, vk::ShaderStageFlagBits shaderStage, std::string const & shaderText )
+vk::UniqueShaderModule createShaderModuleUnique( vk::UniqueDevice const & device, vk::ShaderStageFlagBits shaderStage, std::string const & shaderText )
 {
   std::vector<unsigned int> shaderSPV;
   if ( !vk::su::GLSLtoSPV( shaderStage, shaderText, shaderSPV ) )
@ -138,7 +165,49 @@ vk::UniqueShaderModule createShaderModuleUnique( vk::Device const & device, vk::
     throw std::runtime_error( "Could not convert glsl shader to spir-v -> terminating" );
   }

-  return device.createShaderModuleUnique( vk::ShaderModuleCreateInfo( vk::ShaderModuleCreateFlags(), shaderSPV ) );
+  return device->createShaderModuleUnique( vk::ShaderModuleCreateInfo( vk::ShaderModuleCreateFlags(), shaderSPV ) );
+}
+
+vk::UniqueSwapchainKHR createSwapchainKHRUnique( vk::PhysicalDevice physicalDevice, vk::UniqueDevice const & device, vk::SurfaceKHR surface )
+{
+  vk::SurfaceCapabilitiesKHR surfaceCapabilities = physicalDevice.getSurfaceCapabilitiesKHR( surface );
+  vk::SurfaceFormatKHR       surfaceFormat       = vk::su::pickSurfaceFormat( physicalDevice.getSurfaceFormatsKHR( surface ) );
+  vk::Extent2D               swapchainExtent;
+  if ( surfaceCapabilities.currentExtent.width == std::numeric_limits<uint32_t>::max() )
+  {
+    // If the surface size is undefined, the size is set to the size of the images requested.
+    swapchainExtent.width  = vk::su::clamp<uint32_t>( 64, surfaceCapabilities.minImageExtent.width, surfaceCapabilities.maxImageExtent.width );
+    swapchainExtent.height = vk::su::clamp<uint32_t>( 64, surfaceCapabilities.minImageExtent.height, surfaceCapabilities.maxImageExtent.height );
+  }
+  else
+  {
+    // If the surface size is defined, the swap chain size must match
+    swapchainExtent = surfaceCapabilities.currentExtent;
+  }
+  vk::SurfaceTransformFlagBitsKHR preTransform = ( surfaceCapabilities.supportedTransforms & vk::SurfaceTransformFlagBitsKHR::eIdentity )
+                                                 ? vk::SurfaceTransformFlagBitsKHR::eIdentity
+                                                 : surfaceCapabilities.currentTransform;
+  vk::CompositeAlphaFlagBitsKHR compositeAlpha =
+    ( surfaceCapabilities.supportedCompositeAlpha & vk::CompositeAlphaFlagBitsKHR::ePreMultiplied )    ? vk::CompositeAlphaFlagBitsKHR::ePreMultiplied
+    : ( surfaceCapabilities.supportedCompositeAlpha & vk::CompositeAlphaFlagBitsKHR::ePostMultiplied ) ? vk::CompositeAlphaFlagBitsKHR::ePostMultiplied
+    : ( surfaceCapabilities.supportedCompositeAlpha & vk::CompositeAlphaFlagBitsKHR::eInherit )        ? vk::CompositeAlphaFlagBitsKHR::eInherit
+                                                                                                       : vk::CompositeAlphaFlagBitsKHR::eOpaque;
+  vk::SwapchainCreateInfoKHR swapChainCreateInfo( {},
+                                                  surface,
+                                                  vk::su::clamp( 3u, surfaceCapabilities.minImageCount, surfaceCapabilities.maxImageCount ),
+                                                  surfaceFormat.format,
+                                                  surfaceFormat.colorSpace,
+                                                  swapchainExtent,
+                                                  1,
+                                                  vk::ImageUsageFlagBits::eColorAttachment,
+                                                  vk::SharingMode::eExclusive,
+                                                  {},
+                                                  preTransform,
+                                                  compositeAlpha,
+                                                  vk::PresentModeKHR::eFifo,
+                                                  true,
+                                                  nullptr );
+  return device->createSwapchainKHRUnique( swapChainCreateInfo );
 }

 int main( int /*argc*/, char ** /*argv*/ )
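Note: likewise, the new createSwapchainKHRUnique helper is called from main() below; a standalone call, assuming a physical device, the unique device, and a vk::SurfaceKHR named surface:

    vk::UniqueSwapchainKHR swapchain = createSwapchainKHRUnique( physicalDevice, device, surface );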
@ -161,25 +230,30 @@ int main( int /*argc*/, char ** /*argv*/ )
     assert( graphicsQueueFamilyIndex < queueFamilyProperties.size() );

     // create a Device
-    float                     queuePriority = 0.0f;
-    vk::DeviceQueueCreateInfo deviceQueueCreateInfo( vk::DeviceQueueCreateFlags(), static_cast<uint32_t>( graphicsQueueFamilyIndex ), 1, &queuePriority );
-    vk::DeviceCreateInfo      deviceCreateInfo( vk::DeviceCreateFlags(), deviceQueueCreateInfo );
-    vk::UniqueDevice          device = physicalDevices[0].createDeviceUnique( deviceCreateInfo );
+    vk::UniqueDevice device = createDeviceUnique( physicalDevices[0], static_cast<uint32_t>( graphicsQueueFamilyIndex ), vk::su::getDeviceExtensions() );

     // create a PipelineCache
     vk::UniquePipelineCache pipelineCache = device->createPipelineCacheUnique( vk::PipelineCacheCreateInfo() );

     // get some vk::ShaderModules
     glslang::InitializeProcess();
-    vk::UniqueShaderModule vertexShaderModule   = createShaderModuleUnique( *device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PC_C );
-    vk::UniqueShaderModule fragmentShaderModule = createShaderModuleUnique( *device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_C_C );
+    vk::UniqueShaderModule vertexShaderModule   = createShaderModuleUnique( device, vk::ShaderStageFlagBits::eVertex, vertexShaderText_PC_C );
+    vk::UniqueShaderModule fragmentShaderModule = createShaderModuleUnique( device, vk::ShaderStageFlagBits::eFragment, fragmentShaderText_C_C );
     glslang::FinalizeProcess();

     // initialize an array of vk::PipelineShaderStageCreateInfos
+    // showing the simplified usage when VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST is defined
+#if defined( VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST )
+    std::array<vk::PipelineShaderStageCreateInfo, 2> pipelineShaderStageCreateInfos = {
+      vk::PipelineShaderStageCreateInfo( vk::PipelineShaderStageCreateFlags(), vk::ShaderStageFlagBits::eVertex, vertexShaderModule, "main" ),
+      vk::PipelineShaderStageCreateInfo( vk::PipelineShaderStageCreateFlags(), vk::ShaderStageFlagBits::eFragment, fragmentShaderModule, "main" )
+    };
+#else
     std::array<vk::PipelineShaderStageCreateInfo, 2> pipelineShaderStageCreateInfos = {
       vk::PipelineShaderStageCreateInfo( vk::PipelineShaderStageCreateFlags(), vk::ShaderStageFlagBits::eVertex, *vertexShaderModule, "main" ),
       vk::PipelineShaderStageCreateInfo( vk::PipelineShaderStageCreateFlags(), vk::ShaderStageFlagBits::eFragment, *fragmentShaderModule, "main" )
     };
+#endif

     vk::VertexInputBindingDescription                  vertexInputBindingDescription( 0, sizeof( coloredCubeData[0] ) );
     std::array<vk::VertexInputAttributeDescription, 2> vertexInputAttributeDescriptions = {
@ -247,14 +321,14 @@ int main( int /*argc*/, char ** /*argv*/ )
     vk::PipelineDynamicStateCreateInfo pipelineDynamicStateCreateInfo( vk::PipelineDynamicStateCreateFlags(), dynamicStates );

     vk::UniqueDescriptorSetLayout descriptorSetLayout =
-      createDescriptorSetLayoutUnique( *device, { { vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex } } );
+      createDescriptorSetLayoutUnique( device, { { vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex } } );
     vk::UniquePipelineLayout pipelineLayout =
       device->createPipelineLayoutUnique( vk::PipelineLayoutCreateInfo( vk::PipelineLayoutCreateFlags(), *descriptorSetLayout ) );

     vk::su::SurfaceData surfaceData( *instance, AppName, vk::Extent2D( 500, 500 ) );

     vk::UniqueRenderPass renderPass = createRenderPassUnique(
-      *device, vk::su::pickSurfaceFormat( physicalDevices[0].getSurfaceFormatsKHR( surfaceData.surface ) ).format, vk::Format::eD16Unorm );
+      device, vk::su::pickSurfaceFormat( physicalDevices[0].getSurfaceFormatsKHR( surfaceData.surface ) ).format, vk::Format::eD16Unorm );

     // initialize the vk::GraphicsPipelineCreateInfo
     vk::GraphicsPipelineCreateInfo graphicsPipelineCreateInfo( vk::PipelineCreateFlags(),  // flags
@ -289,11 +363,15 @@ int main( int /*argc*/, char ** /*argv*/ )
     vk::UniquePipeline graphicsPipeline3 =
       std::move( device->createGraphicsPipelinesUnique<vk::DispatchLoaderDynamic>( *pipelineCache, graphicsPipelineCreateInfo ).value[0] );

-    std::vector<vk::UniqueDescriptorSet> descriptorSets = device->allocateDescriptorSetsUnique( {} );
+    vk::DescriptorPoolSize   poolSize( vk::DescriptorType::eUniformBuffer, 1 );
+    vk::UniqueDescriptorPool descriptorPool = device->createDescriptorPoolUnique( { vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 1, poolSize } );

-    vk::UniqueSwapchainKHR swapchain = device->createSharedSwapchainKHRUnique( {} );
+    std::vector<vk::UniqueDescriptorSet> descriptorSets = device->allocateDescriptorSetsUnique( { *descriptorPool, *descriptorSetLayout } );

-    // destroy the non-Unique surface used here
+    vk::UniqueSwapchainKHR swapchain = createSwapchainKHRUnique( physicalDevices[0], device, surfaceData.surface );
+
+    // destroy the non-Unique surface used here, but swapchain needs to be destroyed first
+    swapchain.reset();
     instance->destroySurfaceKHR( surfaceData.surface );
   }
   catch ( vk::SystemError & err )
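Note: the explicit reset() matters because a VkSwapchainKHR must not outlive the VkSurfaceKHR it was created from; letting the vk::UniqueSwapchainKHR destructor run at scope exit would destroy it after the surface is already gone. Annotated:

    swapchain.reset();                                   // destroy the swapchain first
    instance->destroySurfaceKHR( surfaceData.surface );  // then the surface it was created from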
@ -919,6 +919,13 @@ namespace VULKAN_HPP_NAMESPACE
       return m_value.operator bool();
     }

+# if defined( VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST )
+    operator Type() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_value;
+    }
+# endif
+
     Type const * operator->() const VULKAN_HPP_NOEXCEPT
     {
       return &m_value;
@ -2833,6 +2833,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_instance;
     }

+    operator VULKAN_HPP_NAMESPACE::Instance() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_instance )
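Note: each vk::raii handle now gets the matching conversion, so the wrapper can be handed to anything expecting the plain vk:: handle without the unary *. A minimal sketch for the Instance case (context and instanceCreateInfo assumed to exist):

    vk::raii::Instance instance( context, instanceCreateInfo );
    vk::Instance handle = instance;  // implicit conversion, no '*' needed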
@ -3100,6 +3105,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_physicalDevice;
     }

+    operator VULKAN_HPP_NAMESPACE::PhysicalDevice() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       m_physicalDevice = nullptr;
@ -3561,6 +3571,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_device;
     }

+    operator VULKAN_HPP_NAMESPACE::Device() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_device )
@ -4584,7 +4599,7 @@ namespace VULKAN_HPP_NAMESPACE
     AccelerationStructureKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                               VkAccelerationStructureKHR accelerationStructure,
                               VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_accelerationStructure( accelerationStructure )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -4628,6 +4643,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_accelerationStructure;
     }

+    operator VULKAN_HPP_NAMESPACE::AccelerationStructureKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructure;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_accelerationStructure )
@ -4699,7 +4719,7 @@ namespace VULKAN_HPP_NAMESPACE
     AccelerationStructureNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                              VkAccelerationStructureNV accelerationStructure,
                              VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_accelerationStructure( accelerationStructure )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -4743,6 +4763,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_accelerationStructure;
     }

+    operator VULKAN_HPP_NAMESPACE::AccelerationStructureNV() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructure;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_accelerationStructure )
@ -4822,7 +4847,7 @@ namespace VULKAN_HPP_NAMESPACE
     Buffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
             VkBuffer buffer,
             VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_buffer( buffer )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -4866,6 +4891,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_buffer;
     }

+    operator VULKAN_HPP_NAMESPACE::Buffer() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_buffer )
@ -4943,7 +4973,7 @@ namespace VULKAN_HPP_NAMESPACE
     BufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                              VkBufferCollectionFUCHSIA collection,
                              VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_collection( collection )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -4987,6 +5017,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_collection;
     }

+    operator VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_collection;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_collection )
@ -5067,7 +5102,7 @@ namespace VULKAN_HPP_NAMESPACE
     BufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                 VkBufferView bufferView,
                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_bufferView( bufferView )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -5111,6 +5146,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_bufferView;
     }

+    operator VULKAN_HPP_NAMESPACE::BufferView() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_bufferView )
@ -5181,7 +5221,7 @@ namespace VULKAN_HPP_NAMESPACE
     CommandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                  VkCommandPool commandPool,
                  VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_commandPool( commandPool )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -5225,6 +5265,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_commandPool;
     }

+    operator VULKAN_HPP_NAMESPACE::CommandPool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_commandPool )
@ -5296,7 +5341,7 @@ namespace VULKAN_HPP_NAMESPACE

   public:
     CommandBuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkCommandBuffer commandBuffer, VkCommandPool commandPool )
-      : m_device( *device ), m_commandPool( commandPool ), m_commandBuffer( commandBuffer ), m_dispatcher( device.getDispatcher() )
+      : m_device( device ), m_commandPool( commandPool ), m_commandBuffer( commandBuffer ), m_dispatcher( device.getDispatcher() )
     {
     }

@ -5337,6 +5382,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_commandBuffer;
     }

+    operator VULKAN_HPP_NAMESPACE::CommandBuffer() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_commandBuffer )
@ -6362,7 +6412,7 @@ namespace VULKAN_HPP_NAMESPACE
     CuFunctionNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                    VkCuFunctionNVX function,
                    VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_function( function )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -6406,6 +6456,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_function;
     }

+    operator VULKAN_HPP_NAMESPACE::CuFunctionNVX() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_function;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_function )
@ -6476,7 +6531,7 @@ namespace VULKAN_HPP_NAMESPACE
     CuModuleNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                  VkCuModuleNVX module,
                  VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_module( module )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -6520,6 +6575,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_module;
     }

+    operator VULKAN_HPP_NAMESPACE::CuModuleNVX() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_module;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_module )
@ -6591,7 +6651,7 @@ namespace VULKAN_HPP_NAMESPACE
     CudaFunctionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                     VkCudaFunctionNV function,
                     VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_function( function )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -6635,6 +6695,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_function;
     }

+    operator VULKAN_HPP_NAMESPACE::CudaFunctionNV() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_function;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_function )
@ -6707,7 +6772,7 @@ namespace VULKAN_HPP_NAMESPACE
     CudaModuleNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                   VkCudaModuleNV module,
                   VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_module( module )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -6751,6 +6816,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_module;
     }

+    operator VULKAN_HPP_NAMESPACE::CudaModuleNV() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_module;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_module )
@ -6826,7 +6896,7 @@ namespace VULKAN_HPP_NAMESPACE
     DebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
                             VkDebugReportCallbackEXT callback,
                             VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_instance( *instance )
+      : m_instance( instance )
       , m_callback( callback )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( instance.getDispatcher() )
@ -6870,6 +6940,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_callback;
     }

+    operator VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_callback;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_callback )
@ -6941,7 +7016,7 @@ namespace VULKAN_HPP_NAMESPACE
     DebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
                             VkDebugUtilsMessengerEXT messenger,
                             VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_instance( *instance )
+      : m_instance( instance )
       , m_messenger( messenger )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( instance.getDispatcher() )
@ -6985,6 +7060,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_messenger;
     }

+    operator VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_messenger;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_messenger )
@ -7055,7 +7135,7 @@ namespace VULKAN_HPP_NAMESPACE
     DeferredOperationKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                           VkDeferredOperationKHR operation,
                           VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_operation( operation )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -7099,6 +7179,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_operation;
     }

+    operator VULKAN_HPP_NAMESPACE::DeferredOperationKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_operation;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_operation )
@ -7178,7 +7263,7 @@ namespace VULKAN_HPP_NAMESPACE
     DescriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                     VkDescriptorPool descriptorPool,
                     VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_descriptorPool( descriptorPool )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -7222,6 +7307,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_descriptorPool;
     }

+    operator VULKAN_HPP_NAMESPACE::DescriptorPool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_descriptorPool )
@ -7286,7 +7376,7 @@ namespace VULKAN_HPP_NAMESPACE

   public:
     DescriptorSet( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkDescriptorSet descriptorSet, VkDescriptorPool descriptorPool )
-      : m_device( *device ), m_descriptorPool( descriptorPool ), m_descriptorSet( descriptorSet ), m_dispatcher( device.getDispatcher() )
+      : m_device( device ), m_descriptorPool( descriptorPool ), m_descriptorSet( descriptorSet ), m_dispatcher( device.getDispatcher() )
     {
     }

@ -7327,6 +7417,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_descriptorSet;
     }

+    operator VULKAN_HPP_NAMESPACE::DescriptorSet() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_descriptorSet )
@ -7439,7 +7534,7 @@ namespace VULKAN_HPP_NAMESPACE
     DescriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                          VkDescriptorSetLayout descriptorSetLayout,
                          VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_descriptorSetLayout( descriptorSetLayout )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -7483,6 +7578,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_descriptorSetLayout;
     }

+    operator VULKAN_HPP_NAMESPACE::DescriptorSetLayout() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_descriptorSetLayout )
@ -7560,7 +7660,7 @@ namespace VULKAN_HPP_NAMESPACE
     DescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                               VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                               VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_descriptorUpdateTemplate( descriptorUpdateTemplate )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -7604,6 +7704,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_descriptorUpdateTemplate;
     }

+    operator VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_descriptorUpdateTemplate )
@ -7675,7 +7780,7 @@ namespace VULKAN_HPP_NAMESPACE
     DeviceMemory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                   VkDeviceMemory memory,
                   VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_memory( memory )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -7719,6 +7824,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_memory;
     }

+    operator VULKAN_HPP_NAMESPACE::DeviceMemory() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_memory;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_memory )
@ -7823,7 +7933,7 @@ namespace VULKAN_HPP_NAMESPACE
 # endif

     DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, VkDisplayKHR display )
-      : m_physicalDevice( *physicalDevice ), m_display( display ), m_dispatcher( physicalDevice.getDispatcher() )
+      : m_physicalDevice( physicalDevice ), m_display( display ), m_dispatcher( physicalDevice.getDispatcher() )
     {
     }

@ -7862,6 +7972,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_display;
     }

+    operator VULKAN_HPP_NAMESPACE::DisplayKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_display;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_display )
@ -8014,6 +8129,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_displayModeKHR;
     }

+    operator VULKAN_HPP_NAMESPACE::DisplayModeKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       m_physicalDevice = nullptr;
@ -8074,7 +8194,7 @@ namespace VULKAN_HPP_NAMESPACE
     Event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
            VkEvent event,
            VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_event( event )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -8118,6 +8238,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_event;
     }

+    operator VULKAN_HPP_NAMESPACE::Event() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_event )
@ -8215,7 +8340,7 @@ namespace VULKAN_HPP_NAMESPACE
     Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
            VkFence fence,
            VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_fence( fence )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -8259,6 +8384,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_fence;
     }

+    operator VULKAN_HPP_NAMESPACE::Fence() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_fence;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_fence )
@ -8333,7 +8463,7 @@ namespace VULKAN_HPP_NAMESPACE
     Framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                  VkFramebuffer framebuffer,
                  VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_framebuffer( framebuffer )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -8377,6 +8507,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_framebuffer;
     }

+    operator VULKAN_HPP_NAMESPACE::Framebuffer() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_framebuffer )
@ -8451,7 +8586,7 @@ namespace VULKAN_HPP_NAMESPACE
     Image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
            VkImage image,
            VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_image( image )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -8495,6 +8630,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_image;
     }

+    operator VULKAN_HPP_NAMESPACE::Image() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_image )
@ -8598,7 +8738,7 @@ namespace VULKAN_HPP_NAMESPACE
     ImageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                VkImageView imageView,
                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_imageView( imageView )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -8642,6 +8782,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_imageView;
     }

+    operator VULKAN_HPP_NAMESPACE::ImageView() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_imageView;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_imageView )
@@ -8716,7 +8861,7 @@ namespace VULKAN_HPP_NAMESPACE
     IndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                               VkIndirectCommandsLayoutNV indirectCommandsLayout,
                               VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_indirectCommandsLayout( indirectCommandsLayout )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@@ -8760,6 +8905,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_indirectCommandsLayout;
     }
 
+    operator VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayout;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_indirectCommandsLayout )
@@ -8831,7 +8981,7 @@ namespace VULKAN_HPP_NAMESPACE
     MicromapEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                  VkMicromapEXT micromap,
                  VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_micromap( micromap )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@@ -8875,6 +9025,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_micromap;
     }
 
+    operator VULKAN_HPP_NAMESPACE::MicromapEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_micromap;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_micromap )
@@ -8945,7 +9100,7 @@ namespace VULKAN_HPP_NAMESPACE
     OpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                           VkOpticalFlowSessionNV session,
                           VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_session( session )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@@ -8989,6 +9144,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_session;
     }
 
+    operator VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_session;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_session )
@@ -9063,7 +9223,7 @@ namespace VULKAN_HPP_NAMESPACE
 # endif
 
     PerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkPerformanceConfigurationINTEL configuration )
-      : m_device( *device ), m_configuration( configuration ), m_dispatcher( device.getDispatcher() )
+      : m_device( device ), m_configuration( configuration ), m_dispatcher( device.getDispatcher() )
     {
     }
 
@@ -9102,6 +9262,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_configuration;
     }
 
+    operator VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_configuration;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_configuration )
@@ -9168,7 +9333,7 @@ namespace VULKAN_HPP_NAMESPACE
     PipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                    VkPipelineCache pipelineCache,
                    VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_pipelineCache( pipelineCache )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@@ -9212,6 +9377,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_pipelineCache;
     }
 
+    operator VULKAN_HPP_NAMESPACE::PipelineCache() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_pipelineCache )
@@ -9334,7 +9504,7 @@ namespace VULKAN_HPP_NAMESPACE
               VkPipeline pipeline,
               VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr,
               VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess )
-      : m_device( *device )
+      : m_device( device )
       , m_pipeline( pipeline )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_constructorSuccessCode( successCode )
@@ -9381,6 +9551,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_pipeline;
     }
 
+    operator VULKAN_HPP_NAMESPACE::Pipeline() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipeline;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_pipeline )
@@ -9572,7 +9747,7 @@ namespace VULKAN_HPP_NAMESPACE
     PipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                     VkPipelineLayout pipelineLayout,
                     VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_pipelineLayout( pipelineLayout )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@@ -9616,6 +9791,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_pipelineLayout;
     }
 
+    operator VULKAN_HPP_NAMESPACE::PipelineLayout() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineLayout;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_pipelineLayout )
@@ -9687,7 +9867,7 @@ namespace VULKAN_HPP_NAMESPACE
     PrivateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                      VkPrivateDataSlot privateDataSlot,
                      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_privateDataSlot( privateDataSlot )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@@ -9731,6 +9911,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_privateDataSlot;
     }
 
+    operator VULKAN_HPP_NAMESPACE::PrivateDataSlot() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_privateDataSlot;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_privateDataSlot )
@@ -9802,7 +9987,7 @@ namespace VULKAN_HPP_NAMESPACE
     QueryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
               VkQueryPool queryPool,
               VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_queryPool( queryPool )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@@ -9846,6 +10031,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_queryPool;
     }
 
+    operator VULKAN_HPP_NAMESPACE::QueryPool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_queryPool )
@@ -9986,6 +10176,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_queue;
     }
 
+    operator VULKAN_HPP_NAMESPACE::Queue() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       m_queue = nullptr;
@@ -10093,7 +10288,7 @@ namespace VULKAN_HPP_NAMESPACE
     RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                 VkRenderPass renderPass,
                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_renderPass( renderPass )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@@ -10137,6 +10332,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_renderPass;
     }
 
+    operator VULKAN_HPP_NAMESPACE::RenderPass() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_renderPass )
@@ -10215,7 +10415,7 @@ namespace VULKAN_HPP_NAMESPACE
     Sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
              VkSampler sampler,
             VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_sampler( sampler )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@@ -10259,6 +10459,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_sampler;
     }
 
+    operator VULKAN_HPP_NAMESPACE::Sampler() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_sampler )
@@ -10329,7 +10534,7 @@ namespace VULKAN_HPP_NAMESPACE
     SamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                             VkSamplerYcbcrConversion ycbcrConversion,
                             VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_ycbcrConversion( ycbcrConversion )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@@ -10373,6 +10578,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_ycbcrConversion;
     }
 
+    operator VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ycbcrConversion;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_ycbcrConversion )
@@ -10444,7 +10654,7 @@ namespace VULKAN_HPP_NAMESPACE
     Semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
               VkSemaphore semaphore,
              VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_semaphore( semaphore )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@@ -10488,6 +10698,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_semaphore;
     }
 
+    operator VULKAN_HPP_NAMESPACE::Semaphore() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_semaphore;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
    {
       if ( m_semaphore )
@@ -10566,7 +10781,7 @@ namespace VULKAN_HPP_NAMESPACE
     ShaderEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
               VkShaderEXT shader,
              VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_shader( shader )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@@ -10610,6 +10825,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_shader;
     }
 
+    operator VULKAN_HPP_NAMESPACE::ShaderEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shader;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_shader )
@@ -10711,7 +10931,7 @@ namespace VULKAN_HPP_NAMESPACE
     ShaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                   VkShaderModule shaderModule,
                   VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_shaderModule( shaderModule )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@@ -10755,6 +10975,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_shaderModule;
     }
 
+    operator VULKAN_HPP_NAMESPACE::ShaderModule() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_shaderModule )
@@ -10981,7 +11206,7 @@ namespace VULKAN_HPP_NAMESPACE
     SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
                 VkSurfaceKHR surface,
                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_instance( *instance )
+      : m_instance( instance )
       , m_surface( surface )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( instance.getDispatcher() )
@@ -11025,6 +11250,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_surface;
     }
 
+    operator VULKAN_HPP_NAMESPACE::SurfaceKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surface;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_surface )
@@ -11095,7 +11325,7 @@ namespace VULKAN_HPP_NAMESPACE
     SwapchainKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                   VkSwapchainKHR swapchain,
                   VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_swapchain( swapchain )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@@ -11139,6 +11369,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_swapchain;
     }
 
+    operator VULKAN_HPP_NAMESPACE::SwapchainKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_swapchain;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_swapchain )
@@ -11285,7 +11520,7 @@ namespace VULKAN_HPP_NAMESPACE
     ValidationCacheEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                         VkValidationCacheEXT validationCache,
                         VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_validationCache( validationCache )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@@ -11329,6 +11564,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_validationCache;
     }
 
+    operator VULKAN_HPP_NAMESPACE::ValidationCacheEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCache;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_validationCache )
@@ -11406,7 +11646,7 @@ namespace VULKAN_HPP_NAMESPACE
     VideoSessionKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                      VkVideoSessionKHR videoSession,
                      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_videoSession( videoSession )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@@ -11450,6 +11690,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_videoSession;
     }
 
+    operator VULKAN_HPP_NAMESPACE::VideoSessionKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_videoSession;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_videoSession )
@@ -11527,7 +11772,7 @@ namespace VULKAN_HPP_NAMESPACE
     VideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                                VkVideoSessionParametersKHR videoSessionParameters,
                                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_videoSessionParameters( videoSessionParameters )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@@ -11571,6 +11816,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_videoSessionParameters;
     }
 
+    operator VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_videoSessionParameters;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_videoSessionParameters )
@@ -182,6 +182,13 @@ namespace VULKAN_HPP_NAMESPACE
       return bool( m_handle );
     }
 
+# if defined( VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST )
+    operator HandleType() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_handle;
+    }
+# endif
+
     const HandleType * operator->() const VULKAN_HPP_NOEXCEPT
     {
       return &m_handle;
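
For vk::UniqueHandle the conversion is opt-in: it only exists when VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST is defined, presumably to keep owning handles from decaying implicitly in code that does not expect it. A minimal sketch of the opt-in usage (function and variable names are illustrative):

    #define VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST  // must precede the include
    #include <vulkan/vulkan.hpp>

    void bind( vk::Device device, vk::UniqueBuffer const & buffer, vk::DeviceMemory memory )
    {
      // with the macro defined, the unique handle converts implicitly;
      // without it, buffer.get() (or *buffer) is still required
      device.bindBufferMemory( buffer, memory, 0 );
    }
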
@@ -919,6 +919,13 @@ namespace VULKAN_HPP_NAMESPACE
       return m_value.operator bool();
     }
 
+# if defined( VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST )
+    operator Type() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_value;
+    }
+# endif
+
     Type const * operator->() const VULKAN_HPP_NOEXCEPT
     {
       return &m_value;
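
vk::SharedHandle gets the same macro-gated conversion to its element type. A minimal sketch, assuming the vk::SharedImageView alias from vulkan_shared.hpp:

    #define VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST
    #include <vulkan/vulkan_shared.hpp>

    // returns the plain handle; without the macro this would be view.get()
    vk::ImageView plainView( vk::SharedImageView const & view )
    {
      return view;
    }
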
@@ -1256,6 +1256,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_instance;
     }
 
+    operator VULKAN_HPP_NAMESPACE::Instance() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_instance )
@@ -1386,6 +1391,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_physicalDevice;
     }
 
+    operator VULKAN_HPP_NAMESPACE::PhysicalDevice() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       m_physicalDevice = nullptr;
@@ -1676,6 +1686,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_device;
     }
 
+    operator VULKAN_HPP_NAMESPACE::Device() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_device )
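
With raii::Instance, raii::PhysicalDevice and raii::Device converting implicitly, utility code written against the plain handle types can accept raii objects without adaptation. A sketch under that assumption (the helper name is illustrative, and plain-handle member calls still rely on whatever default dispatcher the caller has initialized):

    #include <vulkan/vulkan_raii.hpp>

    void printDeviceName( vk::PhysicalDevice physicalDevice )  // plain-handle utility
    {
      vk::PhysicalDeviceProperties properties = physicalDevice.getProperties();
      // ... use properties.deviceName ...
    }

    void inspect( vk::raii::PhysicalDevice const & physicalDevice )
    {
      printDeviceName( physicalDevice );  // previously: printDeviceName( *physicalDevice );
    }
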
@@ -2091,7 +2106,7 @@ namespace VULKAN_HPP_NAMESPACE
     Buffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
             VkBuffer buffer,
             VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_buffer( buffer )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@@ -2135,6 +2150,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_buffer;
     }
 
+    operator VULKAN_HPP_NAMESPACE::Buffer() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_buffer )
@@ -2211,7 +2231,7 @@ namespace VULKAN_HPP_NAMESPACE
     BufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                 VkBufferView bufferView,
                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_bufferView( bufferView )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@@ -2255,6 +2275,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_bufferView;
     }
 
+    operator VULKAN_HPP_NAMESPACE::BufferView() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_bufferView )
@@ -2325,7 +2350,7 @@ namespace VULKAN_HPP_NAMESPACE
     CommandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                  VkCommandPool commandPool,
                  VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_commandPool( commandPool )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@@ -2369,6 +2394,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_commandPool;
     }
 
+    operator VULKAN_HPP_NAMESPACE::CommandPool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_commandPool )
@@ -2437,7 +2467,7 @@ namespace VULKAN_HPP_NAMESPACE
 
   public:
     CommandBuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkCommandBuffer commandBuffer, VkCommandPool commandPool )
-      : m_device( *device ), m_commandPool( commandPool ), m_commandBuffer( commandBuffer ), m_dispatcher( device.getDispatcher() )
+      : m_device( device ), m_commandPool( commandPool ), m_commandBuffer( commandBuffer ), m_dispatcher( device.getDispatcher() )
     {
     }
 
@@ -2478,6 +2508,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_commandBuffer;
     }
 
+    operator VULKAN_HPP_NAMESPACE::CommandBuffer() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_commandBuffer )
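
The same applies to structs that store plain handles, such as vk::SubmitInfo: they can now be populated from raii objects directly. A minimal sketch (assuming the enhanced-mode ArrayProxy constructors are available; names are illustrative):

    #include <vulkan/vulkan_raii.hpp>

    void submitOnce( vk::raii::Queue const & queue, vk::raii::CommandBuffer const & commandBuffer )
    {
      vk::CommandBuffer plain = commandBuffer;  // previously: *commandBuffer
      vk::SubmitInfo submitInfo( {}, {}, plain );
      queue.submit( submitInfo );
      queue.waitIdle();
    }
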
@ -2976,7 +3011,7 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
DebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
|
DebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
|
||||||
VkDebugUtilsMessengerEXT messenger,
|
VkDebugUtilsMessengerEXT messenger,
|
||||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
||||||
: m_instance( *instance )
|
: m_instance( instance )
|
||||||
, m_messenger( messenger )
|
, m_messenger( messenger )
|
||||||
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
||||||
, m_dispatcher( instance.getDispatcher() )
|
, m_dispatcher( instance.getDispatcher() )
|
||||||
@ -3020,6 +3055,11 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
return m_messenger;
|
return m_messenger;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
operator VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT
|
||||||
|
{
|
||||||
|
return m_messenger;
|
||||||
|
}
|
||||||
|
|
||||||
void clear() VULKAN_HPP_NOEXCEPT
|
void clear() VULKAN_HPP_NOEXCEPT
|
||||||
{
|
{
|
||||||
if ( m_messenger )
|
if ( m_messenger )
|
||||||
@ -3091,7 +3131,7 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
DescriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
DescriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
||||||
VkDescriptorPool descriptorPool,
|
VkDescriptorPool descriptorPool,
|
||||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
||||||
: m_device( *device )
|
: m_device( device )
|
||||||
, m_descriptorPool( descriptorPool )
|
, m_descriptorPool( descriptorPool )
|
||||||
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
||||||
, m_dispatcher( device.getDispatcher() )
|
, m_dispatcher( device.getDispatcher() )
|
||||||
@ -3135,6 +3175,11 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
return m_descriptorPool;
|
return m_descriptorPool;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
operator VULKAN_HPP_NAMESPACE::DescriptorPool() const VULKAN_HPP_NOEXCEPT
|
||||||
|
{
|
||||||
|
return m_descriptorPool;
|
||||||
|
}
|
||||||
|
|
||||||
void clear() VULKAN_HPP_NOEXCEPT
|
void clear() VULKAN_HPP_NOEXCEPT
|
||||||
{
|
{
|
||||||
if ( m_descriptorPool )
|
if ( m_descriptorPool )
|
||||||
@ -3199,7 +3244,7 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
|
|
||||||
public:
|
public:
|
||||||
DescriptorSet( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkDescriptorSet descriptorSet, VkDescriptorPool descriptorPool )
|
DescriptorSet( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkDescriptorSet descriptorSet, VkDescriptorPool descriptorPool )
|
||||||
: m_device( *device ), m_descriptorPool( descriptorPool ), m_descriptorSet( descriptorSet ), m_dispatcher( device.getDispatcher() )
|
: m_device( device ), m_descriptorPool( descriptorPool ), m_descriptorSet( descriptorSet ), m_dispatcher( device.getDispatcher() )
|
||||||
{
|
{
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -3240,6 +3285,11 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
return m_descriptorSet;
|
return m_descriptorSet;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
operator VULKAN_HPP_NAMESPACE::DescriptorSet() const VULKAN_HPP_NOEXCEPT
|
||||||
|
{
|
||||||
|
return m_descriptorSet;
|
||||||
|
}
|
||||||
|
|
||||||
void clear() VULKAN_HPP_NOEXCEPT
|
void clear() VULKAN_HPP_NOEXCEPT
|
||||||
{
|
{
|
||||||
if ( m_descriptorSet )
|
if ( m_descriptorSet )
|
||||||
@ -3338,7 +3388,7 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
DescriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
DescriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
||||||
VkDescriptorSetLayout descriptorSetLayout,
|
VkDescriptorSetLayout descriptorSetLayout,
|
||||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
||||||
: m_device( *device )
|
: m_device( device )
|
||||||
, m_descriptorSetLayout( descriptorSetLayout )
|
, m_descriptorSetLayout( descriptorSetLayout )
|
||||||
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
||||||
, m_dispatcher( device.getDispatcher() )
|
, m_dispatcher( device.getDispatcher() )
|
||||||
@ -3382,6 +3432,11 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
return m_descriptorSetLayout;
|
return m_descriptorSetLayout;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
operator VULKAN_HPP_NAMESPACE::DescriptorSetLayout() const VULKAN_HPP_NOEXCEPT
|
||||||
|
{
|
||||||
|
return m_descriptorSetLayout;
|
||||||
|
}
|
||||||
|
|
||||||
void clear() VULKAN_HPP_NOEXCEPT
|
void clear() VULKAN_HPP_NOEXCEPT
|
||||||
{
|
{
|
||||||
if ( m_descriptorSetLayout )
|
if ( m_descriptorSetLayout )
|
||||||
@ -3453,7 +3508,7 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
DeviceMemory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
DeviceMemory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
||||||
VkDeviceMemory memory,
|
VkDeviceMemory memory,
|
||||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
||||||
: m_device( *device )
|
: m_device( device )
|
||||||
, m_memory( memory )
|
, m_memory( memory )
|
||||||
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
||||||
, m_dispatcher( device.getDispatcher() )
|
, m_dispatcher( device.getDispatcher() )
|
||||||
@ -3497,6 +3552,11 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
return m_memory;
|
return m_memory;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
operator VULKAN_HPP_NAMESPACE::DeviceMemory() const VULKAN_HPP_NOEXCEPT
|
||||||
|
{
|
||||||
|
return m_memory;
|
||||||
|
}
|
||||||
|
|
||||||
void clear() VULKAN_HPP_NOEXCEPT
|
void clear() VULKAN_HPP_NOEXCEPT
|
||||||
{
|
{
|
||||||
if ( m_memory )
|
if ( m_memory )
|
||||||
@ -3566,7 +3626,7 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
|
|
||||||
public:
|
public:
|
||||||
DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, VkDisplayKHR display )
|
DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, VkDisplayKHR display )
|
||||||
: m_physicalDevice( *physicalDevice ), m_display( display ), m_dispatcher( physicalDevice.getDispatcher() )
|
: m_physicalDevice( physicalDevice ), m_display( display ), m_dispatcher( physicalDevice.getDispatcher() )
|
||||||
{
|
{
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -3605,6 +3665,11 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
return m_display;
|
return m_display;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
operator VULKAN_HPP_NAMESPACE::DisplayKHR() const VULKAN_HPP_NOEXCEPT
|
||||||
|
{
|
||||||
|
return m_display;
|
||||||
|
}
|
||||||
|
|
||||||
void clear() VULKAN_HPP_NOEXCEPT
|
void clear() VULKAN_HPP_NOEXCEPT
|
||||||
{
|
{
|
||||||
if ( m_display )
|
if ( m_display )
|
||||||
@ -3751,6 +3816,11 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
return m_displayModeKHR;
|
return m_displayModeKHR;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
operator VULKAN_HPP_NAMESPACE::DisplayModeKHR() const VULKAN_HPP_NOEXCEPT
|
||||||
|
{
|
||||||
|
return m_displayModeKHR;
|
||||||
|
}
|
||||||
|
|
||||||
void clear() VULKAN_HPP_NOEXCEPT
|
void clear() VULKAN_HPP_NOEXCEPT
|
||||||
{
|
{
|
||||||
m_physicalDevice = nullptr;
|
m_physicalDevice = nullptr;
|
||||||
@ -3811,7 +3881,7 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
Event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
Event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
||||||
VkEvent event,
|
VkEvent event,
|
||||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
||||||
: m_device( *device )
|
: m_device( device )
|
||||||
, m_event( event )
|
, m_event( event )
|
||||||
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
||||||
, m_dispatcher( device.getDispatcher() )
|
, m_dispatcher( device.getDispatcher() )
|
||||||
@ -3855,6 +3925,11 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
return m_event;
|
return m_event;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
operator VULKAN_HPP_NAMESPACE::Event() const VULKAN_HPP_NOEXCEPT
|
||||||
|
{
|
||||||
|
return m_event;
|
||||||
|
}
|
||||||
|
|
||||||
void clear() VULKAN_HPP_NOEXCEPT
|
void clear() VULKAN_HPP_NOEXCEPT
|
||||||
{
|
{
|
||||||
if ( m_event )
|
if ( m_event )
|
||||||
@ -3952,7 +4027,7 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
||||||
VkFence fence,
|
VkFence fence,
|
||||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
||||||
: m_device( *device )
|
: m_device( device )
|
||||||
, m_fence( fence )
|
, m_fence( fence )
|
||||||
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
||||||
, m_dispatcher( device.getDispatcher() )
|
, m_dispatcher( device.getDispatcher() )
|
||||||
@ -3996,6 +4071,11 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
return m_fence;
|
return m_fence;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
operator VULKAN_HPP_NAMESPACE::Fence() const VULKAN_HPP_NOEXCEPT
|
||||||
|
{
|
||||||
|
return m_fence;
|
||||||
|
}
|
||||||
|
|
||||||
void clear() VULKAN_HPP_NOEXCEPT
|
void clear() VULKAN_HPP_NOEXCEPT
|
||||||
{
|
{
|
||||||
if ( m_fence )
|
if ( m_fence )
|
||||||
@ -4070,7 +4150,7 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
Framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
Framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
||||||
VkFramebuffer framebuffer,
|
VkFramebuffer framebuffer,
|
||||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
||||||
: m_device( *device )
|
: m_device( device )
|
||||||
, m_framebuffer( framebuffer )
|
, m_framebuffer( framebuffer )
|
||||||
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
||||||
, m_dispatcher( device.getDispatcher() )
|
, m_dispatcher( device.getDispatcher() )
|
||||||
@ -4114,6 +4194,11 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
return m_framebuffer;
|
return m_framebuffer;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
operator VULKAN_HPP_NAMESPACE::Framebuffer() const VULKAN_HPP_NOEXCEPT
|
||||||
|
{
|
||||||
|
return m_framebuffer;
|
||||||
|
}
|
||||||
|
|
||||||
void clear() VULKAN_HPP_NOEXCEPT
|
void clear() VULKAN_HPP_NOEXCEPT
|
||||||
{
|
{
|
||||||
if ( m_framebuffer )
|
if ( m_framebuffer )
|
||||||
@ -4184,7 +4269,7 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
Image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
Image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
||||||
VkImage image,
|
VkImage image,
|
||||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
||||||
: m_device( *device )
|
: m_device( device )
|
||||||
, m_image( image )
|
, m_image( image )
|
||||||
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
||||||
, m_dispatcher( device.getDispatcher() )
|
, m_dispatcher( device.getDispatcher() )
|
||||||
@ -4228,6 +4313,11 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
return m_image;
|
return m_image;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
operator VULKAN_HPP_NAMESPACE::Image() const VULKAN_HPP_NOEXCEPT
|
||||||
|
{
|
||||||
|
return m_image;
|
||||||
|
}
|
||||||
|
|
||||||
void clear() VULKAN_HPP_NOEXCEPT
|
void clear() VULKAN_HPP_NOEXCEPT
|
||||||
{
|
{
|
||||||
if ( m_image )
|
if ( m_image )
|
||||||
@ -4311,7 +4401,7 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
ImageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
ImageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
||||||
VkImageView imageView,
|
VkImageView imageView,
|
||||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
||||||
: m_device( *device )
|
: m_device( device )
|
||||||
, m_imageView( imageView )
|
, m_imageView( imageView )
|
||||||
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
||||||
, m_dispatcher( device.getDispatcher() )
|
, m_dispatcher( device.getDispatcher() )
|
||||||
@ -4355,6 +4445,11 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
return m_imageView;
|
return m_imageView;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
operator VULKAN_HPP_NAMESPACE::ImageView() const VULKAN_HPP_NOEXCEPT
|
||||||
|
{
|
||||||
|
return m_imageView;
|
||||||
|
}
|
||||||
|
|
||||||
void clear() VULKAN_HPP_NOEXCEPT
|
void clear() VULKAN_HPP_NOEXCEPT
|
||||||
{
|
{
|
||||||
if ( m_imageView )
|
if ( m_imageView )
|
||||||
@ -4425,7 +4520,7 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
PipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
PipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
||||||
VkPipelineCache pipelineCache,
|
VkPipelineCache pipelineCache,
|
||||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
||||||
: m_device( *device )
|
: m_device( device )
|
||||||
, m_pipelineCache( pipelineCache )
|
, m_pipelineCache( pipelineCache )
|
||||||
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
||||||
, m_dispatcher( device.getDispatcher() )
|
, m_dispatcher( device.getDispatcher() )
|
||||||
@ -4469,6 +4564,11 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
return m_pipelineCache;
|
return m_pipelineCache;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
operator VULKAN_HPP_NAMESPACE::PipelineCache() const VULKAN_HPP_NOEXCEPT
|
||||||
|
{
|
||||||
|
return m_pipelineCache;
|
||||||
|
}
|
||||||
|
|
||||||
void clear() VULKAN_HPP_NOEXCEPT
|
void clear() VULKAN_HPP_NOEXCEPT
|
||||||
{
|
{
|
||||||
if ( m_pipelineCache )
|
if ( m_pipelineCache )
|
||||||
@ -4574,7 +4674,7 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
VkPipeline pipeline,
|
VkPipeline pipeline,
|
||||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr,
|
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr,
|
||||||
VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess )
|
VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess )
|
||||||
: m_device( *device )
|
: m_device( device )
|
||||||
, m_pipeline( pipeline )
|
, m_pipeline( pipeline )
|
||||||
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
||||||
, m_constructorSuccessCode( successCode )
|
, m_constructorSuccessCode( successCode )
|
||||||
@ -4621,6 +4721,11 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
return m_pipeline;
|
return m_pipeline;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
operator VULKAN_HPP_NAMESPACE::Pipeline() const VULKAN_HPP_NOEXCEPT
|
||||||
|
{
|
||||||
|
return m_pipeline;
|
||||||
|
}
|
||||||
|
|
||||||
void clear() VULKAN_HPP_NOEXCEPT
|
void clear() VULKAN_HPP_NOEXCEPT
|
||||||
{
|
{
|
||||||
if ( m_pipeline )
|
if ( m_pipeline )
|
||||||
@ -4738,7 +4843,7 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
PipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
PipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
||||||
VkPipelineLayout pipelineLayout,
|
VkPipelineLayout pipelineLayout,
|
||||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
||||||
: m_device( *device )
|
: m_device( device )
|
||||||
, m_pipelineLayout( pipelineLayout )
|
, m_pipelineLayout( pipelineLayout )
|
||||||
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
||||||
, m_dispatcher( device.getDispatcher() )
|
, m_dispatcher( device.getDispatcher() )
|
||||||
@ -4782,6 +4887,11 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
return m_pipelineLayout;
|
return m_pipelineLayout;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
operator VULKAN_HPP_NAMESPACE::PipelineLayout() const VULKAN_HPP_NOEXCEPT
|
||||||
|
{
|
||||||
|
return m_pipelineLayout;
|
||||||
|
}
|
||||||
|
|
||||||
void clear() VULKAN_HPP_NOEXCEPT
|
void clear() VULKAN_HPP_NOEXCEPT
|
||||||
{
|
{
|
||||||
if ( m_pipelineLayout )
|
if ( m_pipelineLayout )
|
||||||
@ -4853,7 +4963,7 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
PrivateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
PrivateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
||||||
VkPrivateDataSlot privateDataSlot,
|
VkPrivateDataSlot privateDataSlot,
|
||||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
||||||
: m_device( *device )
|
: m_device( device )
|
||||||
, m_privateDataSlot( privateDataSlot )
|
, m_privateDataSlot( privateDataSlot )
|
||||||
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
||||||
, m_dispatcher( device.getDispatcher() )
|
, m_dispatcher( device.getDispatcher() )
|
||||||
@ -4897,6 +5007,11 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
return m_privateDataSlot;
|
return m_privateDataSlot;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
operator VULKAN_HPP_NAMESPACE::PrivateDataSlot() const VULKAN_HPP_NOEXCEPT
|
||||||
|
{
|
||||||
|
return m_privateDataSlot;
|
||||||
|
}
|
||||||
|
|
||||||
void clear() VULKAN_HPP_NOEXCEPT
|
void clear() VULKAN_HPP_NOEXCEPT
|
||||||
{
|
{
|
||||||
if ( m_privateDataSlot )
|
if ( m_privateDataSlot )
|
||||||
@ -4968,7 +5083,7 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
QueryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
QueryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
|
||||||
VkQueryPool queryPool,
|
VkQueryPool queryPool,
|
||||||
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
|
||||||
: m_device( *device )
|
: m_device( device )
|
||||||
, m_queryPool( queryPool )
|
, m_queryPool( queryPool )
|
||||||
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
, m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
|
||||||
, m_dispatcher( device.getDispatcher() )
|
, m_dispatcher( device.getDispatcher() )
|
||||||
@ -5012,6 +5127,11 @@ namespace VULKAN_HPP_NAMESPACE
|
|||||||
return m_queryPool;
|
return m_queryPool;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
operator VULKAN_HPP_NAMESPACE::QueryPool() const VULKAN_HPP_NOEXCEPT
|
||||||
|
{
|
||||||
|
return m_queryPool;
|
||||||
|
}
|
||||||
|
|
||||||
void clear() VULKAN_HPP_NOEXCEPT
|
void clear() VULKAN_HPP_NOEXCEPT
|
||||||
{
|
{
|
||||||
if ( m_queryPool )
|
if ( m_queryPool )
|
||||||
@ -5148,6 +5268,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_queue;
     }

+    operator VULKAN_HPP_NAMESPACE::Queue() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       m_queue = nullptr;
@ -5240,7 +5365,7 @@ namespace VULKAN_HPP_NAMESPACE
     RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                 VkRenderPass renderPass,
                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_renderPass( renderPass )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -5284,6 +5409,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_renderPass;
     }

+    operator VULKAN_HPP_NAMESPACE::RenderPass() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_renderPass )
@ -5358,7 +5488,7 @@ namespace VULKAN_HPP_NAMESPACE
     Sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
              VkSampler sampler,
              VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_sampler( sampler )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -5402,6 +5532,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_sampler;
     }

+    operator VULKAN_HPP_NAMESPACE::Sampler() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_sampler )
@ -5472,7 +5607,7 @@ namespace VULKAN_HPP_NAMESPACE
     SamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                             VkSamplerYcbcrConversion ycbcrConversion,
                             VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_ycbcrConversion( ycbcrConversion )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -5516,6 +5651,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_ycbcrConversion;
     }

+    operator VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ycbcrConversion;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_ycbcrConversion )
@ -5587,7 +5727,7 @@ namespace VULKAN_HPP_NAMESPACE
     Semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                VkSemaphore semaphore,
                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_semaphore( semaphore )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -5631,6 +5771,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_semaphore;
     }

+    operator VULKAN_HPP_NAMESPACE::Semaphore() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_semaphore;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_semaphore )
@ -5706,7 +5851,7 @@ namespace VULKAN_HPP_NAMESPACE
     SemaphoreSciSyncPoolNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                             VkSemaphoreSciSyncPoolNV semaphorePool,
                             VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_semaphorePool( semaphorePool )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -5750,6 +5895,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_semaphorePool;
     }

+    operator VULKAN_HPP_NAMESPACE::SemaphoreSciSyncPoolNV() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_semaphorePool;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_semaphorePool )
@ -5822,7 +5972,7 @@ namespace VULKAN_HPP_NAMESPACE
     ShaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                   VkShaderModule shaderModule,
                   VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_shaderModule( shaderModule )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -5866,6 +6016,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_shaderModule;
     }

+    operator VULKAN_HPP_NAMESPACE::ShaderModule() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_shaderModule )
@ -5945,7 +6100,7 @@ namespace VULKAN_HPP_NAMESPACE
     SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
                 VkSurfaceKHR surface,
                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_instance( *instance )
+      : m_instance( instance )
       , m_surface( surface )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( instance.getDispatcher() )
@ -5989,6 +6144,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_surface;
     }

+    operator VULKAN_HPP_NAMESPACE::SurfaceKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surface;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_surface )
@ -6059,7 +6219,7 @@ namespace VULKAN_HPP_NAMESPACE
     SwapchainKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                   VkSwapchainKHR swapchain,
                   VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-      : m_device( *device )
+      : m_device( device )
       , m_swapchain( swapchain )
       , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       , m_dispatcher( device.getDispatcher() )
@ -6103,6 +6263,11 @@ namespace VULKAN_HPP_NAMESPACE
       return m_swapchain;
     }

+    operator VULKAN_HPP_NAMESPACE::SwapchainKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_swapchain;
+    }
+
     void clear() VULKAN_HPP_NOEXCEPT
     {
       if ( m_swapchain )
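The same pattern repeats for every vk::raii handle type above: each gains an implicit cast operator to its plain vk::* counterpart, so call sites that take non-owning handles no longer need operator*. A minimal sketch, assuming a hypothetical present() helper (not part of Vulkan-Hpp):

    #include <vulkan/vulkan_raii.hpp>

    void present( vk::Queue queue, vk::SwapchainKHR swapchain );  // hypothetical helper

    void example( vk::raii::Device const & device, vk::SwapchainCreateInfoKHR const & createInfo )
    {
      vk::raii::SwapchainKHR swapchain( device, createInfo );
      vk::raii::Queue        queue = device.getQueue( 0, 0 );

      // The raii wrappers convert implicitly to the plain handles.
      present( queue, swapchain );  // previously: present( *queue, *swapchain );
    }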
@ -182,6 +182,13 @@ namespace VULKAN_HPP_NAMESPACE
       return bool( m_handle );
     }

+#  if defined( VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST )
+    operator HandleType() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_handle;
+    }
+#  endif
+
     const HandleType * operator->() const VULKAN_HPP_NOEXCEPT
     {
       return &m_handle;