2019-06-25 07:47:27 +00:00
|
|
|
#pragma once
|
|
|
|
|
2019-03-05 07:59:40 +00:00
|
|
|
// Copyright(c) 2019, NVIDIA CORPORATION. All rights reserved.
|
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
// You may obtain a copy of the License at
|
|
|
|
//
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
//
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
|
|
|
//
|
|
|
|
|
2022-08-10 09:19:06 +00:00
|
|
|
#include <vulkan/vulkan.hpp>
|
2020-01-28 09:16:10 +00:00
|
|
|
|
|
|
|
#define GLFW_INCLUDE_NONE
|
|
|
|
#include <GLFW/glfw3.h>
|
2019-03-05 07:59:40 +00:00
|
|
|
#include <iostream>
|
2021-07-22 21:30:56 +00:00
|
|
|
#include <limits>
|
2022-02-28 09:11:04 +00:00
|
|
|
#include <map>
|
2022-08-10 09:19:06 +00:00
|
|
|
#include <memory> // std::unique_ptr
|
2019-03-05 07:59:40 +00:00
|
|
|
|
|
|
|
namespace vk
|
|
|
|
{
|
|
|
|
namespace su
|
|
|
|
{
|
|
|
|
// Default timeout, in nanoseconds (0.1 s), used when waiting on a vk::Fence.
// constexpr: header-defined constant, usable in constant expressions; same
// internal linkage as the previous 'const'.
constexpr uint64_t FenceTimeout = 100000000;
|
|
|
|
|
2020-01-28 09:16:10 +00:00
|
|
|
template <typename Func>
|
2021-02-17 09:49:59 +00:00
|
|
|
void oneTimeSubmit( vk::CommandBuffer const & commandBuffer, vk::Queue const & queue, Func const & func )
|
2020-01-28 09:16:10 +00:00
|
|
|
{
|
2021-02-17 09:49:59 +00:00
|
|
|
commandBuffer.begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit ) );
|
2020-04-12 19:49:12 +00:00
|
|
|
func( commandBuffer );
|
2021-02-17 09:49:59 +00:00
|
|
|
commandBuffer.end();
|
|
|
|
queue.submit( vk::SubmitInfo( 0, nullptr, nullptr, 1, &commandBuffer ), nullptr );
|
2020-01-28 09:16:10 +00:00
|
|
|
queue.waitIdle();
|
|
|
|
}
|
|
|
|
|
|
|
|
template <typename Func>
|
2022-02-28 09:11:04 +00:00
|
|
|
void oneTimeSubmit( vk::Device const & device, vk::CommandPool const & commandPool, vk::Queue const & queue, Func const & func )
|
2020-01-28 09:16:10 +00:00
|
|
|
{
|
2021-02-17 09:49:59 +00:00
|
|
|
vk::CommandBuffer commandBuffer =
|
2022-02-28 09:11:04 +00:00
|
|
|
device.allocateCommandBuffers( vk::CommandBufferAllocateInfo( commandPool, vk::CommandBufferLevel::ePrimary, 1 ) ).front();
|
2020-04-12 19:49:12 +00:00
|
|
|
oneTimeSubmit( commandBuffer, queue, func );
|
2020-01-28 09:16:10 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
template <class T>
|
2022-02-28 09:11:04 +00:00
|
|
|
void copyToDevice( vk::Device const & device, vk::DeviceMemory const & deviceMemory, T const * pData, size_t count, vk::DeviceSize stride = sizeof( T ) )
|
2020-01-28 09:16:10 +00:00
|
|
|
{
|
2020-04-12 19:49:12 +00:00
|
|
|
assert( sizeof( T ) <= stride );
|
2021-02-17 09:49:59 +00:00
|
|
|
uint8_t * deviceData = static_cast<uint8_t *>( device.mapMemory( deviceMemory, 0, count * stride ) );
|
2020-04-12 19:49:12 +00:00
|
|
|
if ( stride == sizeof( T ) )
|
2020-01-28 09:16:10 +00:00
|
|
|
{
|
2020-04-12 19:49:12 +00:00
|
|
|
memcpy( deviceData, pData, count * sizeof( T ) );
|
2020-01-28 09:16:10 +00:00
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2020-04-12 19:49:12 +00:00
|
|
|
for ( size_t i = 0; i < count; i++ )
|
2020-01-28 09:16:10 +00:00
|
|
|
{
|
2020-04-12 19:49:12 +00:00
|
|
|
memcpy( deviceData, &pData[i], sizeof( T ) );
|
2020-01-28 09:16:10 +00:00
|
|
|
deviceData += stride;
|
|
|
|
}
|
|
|
|
}
|
2021-02-17 09:49:59 +00:00
|
|
|
device.unmapMemory( deviceMemory );
|
2020-01-28 09:16:10 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
template <class T>
|
2021-02-17 09:49:59 +00:00
|
|
|
void copyToDevice( vk::Device const & device, vk::DeviceMemory const & deviceMemory, T const & data )
|
2020-01-28 09:16:10 +00:00
|
|
|
{
|
2021-02-17 09:49:59 +00:00
|
|
|
copyToDevice<T>( device, deviceMemory, &data, 1 );
|
2020-01-28 09:16:10 +00:00
|
|
|
}
|
|
|
|
|
2020-04-12 19:49:12 +00:00
|
|
|
// Constrain 'v' to the closed range [lo, hi], like std::clamp; the behavior is
// only meaningful when lo <= hi. Kept as a single conditional expression so it
// is a valid C++11 constexpr function.
template <class T>
VULKAN_HPP_INLINE constexpr const T & clamp( const T & v, const T & lo, const T & hi )
{
  return v < lo ? lo : hi < v ? hi : v;
}
|
|
|
|
|
2022-02-28 09:11:04 +00:00
|
|
|
void setImageLayout(
|
|
|
|
vk::CommandBuffer const & commandBuffer, vk::Image image, vk::Format format, vk::ImageLayout oldImageLayout, vk::ImageLayout newImageLayout );
|
2020-01-28 09:16:10 +00:00
|
|
|
|
|
|
|
struct WindowData
|
|
|
|
{
|
2020-04-12 19:49:12 +00:00
|
|
|
WindowData( GLFWwindow * wnd, std::string const & name, vk::Extent2D const & extent );
|
|
|
|
WindowData( const WindowData & ) = delete;
|
|
|
|
WindowData( WindowData && other );
|
2020-01-28 09:16:10 +00:00
|
|
|
~WindowData() noexcept;
|
|
|
|
|
2020-04-12 19:49:12 +00:00
|
|
|
GLFWwindow * handle;
|
|
|
|
std::string name;
|
2020-01-28 09:16:10 +00:00
|
|
|
vk::Extent2D extent;
|
|
|
|
};
|
|
|
|
|
2020-04-12 19:49:12 +00:00
|
|
|
WindowData createWindow( std::string const & windowName, vk::Extent2D const & extent );
|
2020-01-28 09:16:10 +00:00
|
|
|
|
2019-03-26 11:24:36 +00:00
|
|
|
// A vk::Buffer together with its bound vk::DeviceMemory, plus upload helpers.
// In debug builds the creation parameters are retained for sanity checks.
struct BufferData
{
  // Creates the buffer and allocates/binds memory with the given properties
  // (defaults to host-visible, host-coherent). Implemented in the .cpp.
  BufferData( vk::PhysicalDevice const & physicalDevice,
              vk::Device const & device,
              vk::DeviceSize size,
              vk::BufferUsageFlags usage,
              vk::MemoryPropertyFlags propertyFlags = vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent );

  // Destroys the buffer, then frees its memory; the object must not be used afterwards.
  void clear( vk::Device const & device )
  {
    device.destroyBuffer( buffer );  // to prevent some validation layer warning, the Buffer needs to be destroyed before the bound DeviceMemory
    device.freeMemory( deviceMemory );
  }

  // Upload a single value into the buffer memory by mapping it directly.
  // Requires host-visible AND host-coherent memory (no flush is issued).
  template <typename DataType>
  void upload( vk::Device const & device, DataType const & data ) const
  {
    assert( ( m_propertyFlags & vk::MemoryPropertyFlagBits::eHostCoherent ) && ( m_propertyFlags & vk::MemoryPropertyFlagBits::eHostVisible ) );
    assert( sizeof( DataType ) <= m_size );

    void * dataPtr = device.mapMemory( deviceMemory, 0, sizeof( DataType ) );
    memcpy( dataPtr, &data, sizeof( DataType ) );
    device.unmapMemory( deviceMemory );
  }

  // Upload a vector of values via copyToDevice; 'stride' of 0 means tightly packed.
  // NOTE(review): only host-visibility is asserted here, although copyToDevice
  // performs no flush — presumably coherent memory is expected too; confirm.
  template <typename DataType>
  void upload( vk::Device const & device, std::vector<DataType> const & data, size_t stride = 0 ) const
  {
    assert( m_propertyFlags & vk::MemoryPropertyFlagBits::eHostVisible );

    size_t elementSize = stride ? stride : sizeof( DataType );
    assert( sizeof( DataType ) <= elementSize );

    copyToDevice( device, deviceMemory, data.data(), data.size(), elementSize );
  }

  // Upload into a device-local buffer by going through a temporary host-visible
  // staging buffer and a one-shot transfer submission on 'queue'.
  template <typename DataType>
  void upload( vk::PhysicalDevice const & physicalDevice,
               vk::Device const & device,
               vk::CommandPool const & commandPool,
               vk::Queue queue,
               std::vector<DataType> const & data,
               size_t stride ) const
  {
    assert( m_usage & vk::BufferUsageFlagBits::eTransferDst );
    assert( m_propertyFlags & vk::MemoryPropertyFlagBits::eDeviceLocal );

    size_t elementSize = stride ? stride : sizeof( DataType );
    assert( sizeof( DataType ) <= elementSize );

    size_t dataSize = data.size() * elementSize;
    assert( dataSize <= m_size );

    // staging buffer uses the default host-visible/host-coherent memory
    vk::su::BufferData stagingBuffer( physicalDevice, device, dataSize, vk::BufferUsageFlagBits::eTransferSrc );
    copyToDevice( device, stagingBuffer.deviceMemory, data.data(), data.size(), elementSize );

    vk::su::oneTimeSubmit( device,
                           commandPool,
                           queue,
                           [&]( vk::CommandBuffer const & commandBuffer )
                           { commandBuffer.copyBuffer( stagingBuffer.buffer, buffer, vk::BufferCopy( 0, 0, dataSize ) ); } );

    // oneTimeSubmit waits for the queue, so the staging buffer can be released here
    stagingBuffer.clear( device );
  }

  vk::Buffer       buffer;
  vk::DeviceMemory deviceMemory;
#if !defined( NDEBUG )
private:
  // creation parameters, kept only for the asserts above
  vk::DeviceSize          m_size;
  vk::BufferUsageFlags    m_usage;
  vk::MemoryPropertyFlags m_propertyFlags;
#endif
};
|
|
|
|
|
|
|
|
// A vk::Image with its bound DeviceMemory and a default ImageView over it.
struct ImageData
{
  // Creates image, memory and view with the given parameters (implemented in the .cpp).
  ImageData( vk::PhysicalDevice const & physicalDevice,
             vk::Device const & device,
             vk::Format format,
             vk::Extent2D const & extent,
             vk::ImageTiling tiling,
             vk::ImageUsageFlags usage,
             vk::ImageLayout initialLayout,
             vk::MemoryPropertyFlags memoryProperties,
             vk::ImageAspectFlags aspectMask );

  // Destroys view and image, then frees the memory; the object must not be used afterwards.
  void clear( vk::Device const & device )
  {
    device.destroyImageView( imageView );
    device.destroyImage( image );  // the Image should be destroyed before the bound DeviceMemory is freed
    device.freeMemory( deviceMemory );
  }

  vk::Format       format;
  vk::Image        image;
  vk::DeviceMemory deviceMemory;
  vk::ImageView    imageView;
};
|
|
|
|
|
|
|
|
// An ImageData configured for use as a depth attachment; the constructor
// (in the .cpp) chooses the appropriate tiling/usage/aspect settings.
struct DepthBufferData : public ImageData
{
  DepthBufferData( vk::PhysicalDevice const & physicalDevice, vk::Device const & device, vk::Format format, vk::Extent2D const & extent );
};
|
|
|
|
|
|
|
|
// Couples a GLFW window with the vk::SurfaceKHR created from it.
struct SurfaceData
{
  // Creates the window and the surface on 'instance' (implemented in the .cpp).
  SurfaceData( vk::Instance const & instance, std::string const & windowName, vk::Extent2D const & extent );

  vk::Extent2D   extent;
  WindowData     window;
  vk::SurfaceKHR surface;
};
|
|
|
|
|
|
|
|
// A vk::SwapchainKHR together with its images and one ImageView per image.
struct SwapChainData
{
  // Creates the swapchain for 'surface' (implemented in the .cpp); pass a valid
  // oldSwapChain on recreation, or a null handle otherwise.
  SwapChainData( vk::PhysicalDevice const & physicalDevice,
                 vk::Device const & device,
                 vk::SurfaceKHR const & surface,
                 vk::Extent2D const & extent,
                 vk::ImageUsageFlags usage,
                 vk::SwapchainKHR const & oldSwapChain,
                 uint32_t graphicsFamilyIndex,
                 uint32_t presentFamilyIndex );

  // Destroys the image views, then the swapchain. The images themselves are
  // owned by the swapchain and are only dropped from the vector, not destroyed.
  void clear( vk::Device const & device )
  {
    for ( auto & imageView : imageViews )
    {
      device.destroyImageView( imageView );
    }
    imageViews.clear();
    images.clear();
    device.destroySwapchainKHR( swapChain );
  }

  vk::Format                 colorFormat;
  vk::SwapchainKHR           swapChain;
  std::vector<vk::Image>     images;
  std::vector<vk::ImageView> imageViews;
};
|
|
|
|
|
2019-06-25 07:47:27 +00:00
|
|
|
class CheckerboardImageGenerator
|
2019-04-15 08:18:58 +00:00
|
|
|
{
|
2019-07-25 11:52:09 +00:00
|
|
|
public:
|
2022-02-28 09:11:04 +00:00
|
|
|
CheckerboardImageGenerator( std::array<uint8_t, 3> const & rgb0 = { { 0, 0, 0 } }, std::array<uint8_t, 3> const & rgb1 = { { 255, 255, 255 } } );
|
2019-07-25 11:52:09 +00:00
|
|
|
|
2020-04-12 19:49:12 +00:00
|
|
|
void operator()( void * data, vk::Extent2D & extent ) const;
|
2019-07-25 11:52:09 +00:00
|
|
|
|
|
|
|
private:
|
2020-04-12 19:49:12 +00:00
|
|
|
std::array<uint8_t, 3> const & m_rgb0;
|
|
|
|
std::array<uint8_t, 3> const & m_rgb1;
|
2019-04-15 08:18:58 +00:00
|
|
|
};
|
|
|
|
|
2019-06-25 07:47:27 +00:00
|
|
|
class MonochromeImageGenerator
|
2019-05-09 13:25:40 +00:00
|
|
|
{
|
2020-04-12 19:49:12 +00:00
|
|
|
public:
|
|
|
|
MonochromeImageGenerator( std::array<unsigned char, 3> const & rgb );
|
2019-05-09 13:25:40 +00:00
|
|
|
|
2020-10-20 07:55:19 +00:00
|
|
|
void operator()( void * data, vk::Extent2D const & extent ) const;
|
2019-05-09 13:25:40 +00:00
|
|
|
|
2020-04-12 19:49:12 +00:00
|
|
|
private:
|
|
|
|
std::array<unsigned char, 3> const & m_rgb;
|
2019-06-25 07:47:27 +00:00
|
|
|
};
|
|
|
|
|
|
|
|
// Image generator that copies caller-provided pixel data into the mapped memory.
class PixelsImageGenerator
{
public:
  // 'pixels' is non-owning and must outlive any call to operator().
  PixelsImageGenerator( vk::Extent2D const & extent, size_t channels, unsigned char const * pixels );

  void operator()( void * data, vk::Extent2D const & extent ) const;

private:
  vk::Extent2D          m_extent;
  size_t                m_channels;
  unsigned char const * m_pixels;  // non-owning view of the source pixels
};
|
|
|
|
|
2019-03-26 11:24:36 +00:00
|
|
|
// A sampled texture: image + sampler, optionally backed by a host-visible
// staging buffer when the image memory cannot be written directly.
struct TextureData
{
  // Creates image, sampler and (if required) the staging buffer; presumably
  // also decides 'needsStaging' from format/tiling support — implemented in the .cpp.
  TextureData( vk::PhysicalDevice const & physicalDevice,
               vk::Device const & device,
               vk::Extent2D const & extent_ = { 256, 256 },
               vk::ImageUsageFlags usageFlags = {},
               vk::FormatFeatureFlags formatFeatureFlags = {},
               bool anisotropyEnable = false,
               bool forceStaging = false );

  // Releases staging buffer (if any), image resources and the sampler.
  void clear( vk::Device const & device )
  {
    if ( stagingBufferData )
    {
      stagingBufferData->clear( device );
    }
    imageData->clear( device );
    device.destroySampler( sampler );
  }

  // Let 'imageGenerator' write the texel data, then record the commands that
  // bring the image into eShaderReadOnlyOptimal layout into 'commandBuffer'
  // (including a buffer-to-image copy when staging is used).
  template <typename ImageGenerator>
  void setImage( vk::Device const & device, vk::CommandBuffer const & commandBuffer, ImageGenerator const & imageGenerator )
  {
    // map either the staging buffer's memory or the image's memory directly
    void * data = needsStaging
                  ? device.mapMemory( stagingBufferData->deviceMemory, 0, device.getBufferMemoryRequirements( stagingBufferData->buffer ).size )
                  : device.mapMemory( imageData->deviceMemory, 0, device.getImageMemoryRequirements( imageData->image ).size );
    imageGenerator( data, extent );
    device.unmapMemory( needsStaging ? stagingBufferData->deviceMemory : imageData->deviceMemory );

    if ( needsStaging )
    {
      // Since we're going to blit to the texture image, set its layout to eTransferDstOptimal
      vk::su::setImageLayout( commandBuffer, imageData->image, imageData->format, vk::ImageLayout::eUndefined, vk::ImageLayout::eTransferDstOptimal );
      vk::BufferImageCopy copyRegion( 0,
                                      extent.width,
                                      extent.height,
                                      vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ),
                                      vk::Offset3D( 0, 0, 0 ),
                                      vk::Extent3D( extent, 1 ) );
      commandBuffer.copyBufferToImage( stagingBufferData->buffer, imageData->image, vk::ImageLayout::eTransferDstOptimal, copyRegion );
      // Set the layout for the texture image from eTransferDstOptimal to SHADER_READ_ONLY
      vk::su::setImageLayout(
        commandBuffer, imageData->image, imageData->format, vk::ImageLayout::eTransferDstOptimal, vk::ImageLayout::eShaderReadOnlyOptimal );
    }
    else
    {
      // If we can use the linear tiled image as a texture, just do it
      vk::su::setImageLayout(
        commandBuffer, imageData->image, imageData->format, vk::ImageLayout::ePreinitialized, vk::ImageLayout::eShaderReadOnlyOptimal );
    }
  }

  vk::Format                  format;
  vk::Extent2D                extent;
  bool                        needsStaging;   // true: upload goes through stagingBufferData
  std::unique_ptr<BufferData> stagingBufferData;
  std::unique_ptr<ImageData>  imageData;
  vk::Sampler                 sampler;
};
|
|
|
|
|
2019-05-09 13:25:40 +00:00
|
|
|
// Value wrapper around the VK_UUID_SIZE raw bytes of a Vulkan UUID
// (printable via the operator<< declared at the end of this header).
struct UUID
{
public:
  UUID( uint8_t const data[VK_UUID_SIZE] );

  uint8_t m_data[VK_UUID_SIZE];
};
|
|
|
|
|
2019-03-05 07:59:40 +00:00
|
|
|
template <typename TargetType, typename SourceType>
|
2020-04-12 19:49:12 +00:00
|
|
|
VULKAN_HPP_INLINE TargetType checked_cast( SourceType value )
|
2019-03-05 07:59:40 +00:00
|
|
|
{
|
2020-04-12 19:49:12 +00:00
|
|
|
static_assert( sizeof( TargetType ) <= sizeof( SourceType ), "No need to cast from smaller to larger type!" );
|
2020-09-30 10:00:32 +00:00
|
|
|
static_assert( std::numeric_limits<SourceType>::is_integer, "Only integer types supported!" );
|
2020-04-12 19:49:12 +00:00
|
|
|
static_assert( !std::numeric_limits<SourceType>::is_signed, "Only unsigned types supported!" );
|
2020-09-30 10:00:32 +00:00
|
|
|
static_assert( std::numeric_limits<TargetType>::is_integer, "Only integer types supported!" );
|
|
|
|
static_assert( !std::numeric_limits<TargetType>::is_signed, "Only unsigned types supported!" );
|
2020-04-12 19:49:12 +00:00
|
|
|
assert( value <= std::numeric_limits<TargetType>::max() );
|
|
|
|
return static_cast<TargetType>( value );
|
2019-03-05 07:59:40 +00:00
|
|
|
}
|
|
|
|
|
2022-02-28 09:11:04 +00:00
|
|
|
// --- Free helper functions, implemented in the accompanying .cpp ---

// Allocate device memory of the required size from a memory type matching
// 'memoryRequirements' and 'memoryPropertyFlags'.
vk::DeviceMemory allocateDeviceMemory( vk::Device const & device,
                                       vk::PhysicalDeviceMemoryProperties const & memoryProperties,
                                       vk::MemoryRequirements const & memoryRequirements,
                                       vk::MemoryPropertyFlags memoryPropertyFlags );
// Check whether 'extensionName' appears in 'extensionProperties'.
bool contains( std::vector<vk::ExtensionProperties> const & extensionProperties, std::string const & extensionName );
vk::CommandPool createCommandPool( vk::Device const & device, uint32_t queueFamilyIndex );
vk::DescriptorPool createDescriptorPool( vk::Device const & device, std::vector<vk::DescriptorPoolSize> const & poolSizes );
// One binding per tuple of (descriptor type, descriptor count, shader stages).
vk::DescriptorSetLayout createDescriptorSetLayout( vk::Device const & device,
                                                   std::vector<std::tuple<vk::DescriptorType, uint32_t, vk::ShaderStageFlags>> const & bindingData,
                                                   vk::DescriptorSetLayoutCreateFlags flags = {} );
vk::Device createDevice( vk::PhysicalDevice const & physicalDevice,
                         uint32_t queueFamilyIndex,
                         std::vector<std::string> const & extensions = {},
                         vk::PhysicalDeviceFeatures const * physicalDeviceFeatures = nullptr,
                         void const * pNext = nullptr );
// One framebuffer per color image view, all sharing 'depthImageView' (if any).
std::vector<vk::Framebuffer> createFramebuffers( vk::Device const & device,
                                                 vk::RenderPass & renderPass,
                                                 std::vector<vk::ImageView> const & imageViews,
                                                 vk::ImageView const & depthImageView,
                                                 vk::Extent2D const & extent );
// Shader stages are given as (module, optional specialization info) pairs;
// vertex attributes as (format, offset) pairs against 'vertexStride'.
vk::Pipeline createGraphicsPipeline( vk::Device const & device,
                                     vk::PipelineCache const & pipelineCache,
                                     std::pair<vk::ShaderModule, vk::SpecializationInfo const *> const & vertexShaderData,
                                     std::pair<vk::ShaderModule, vk::SpecializationInfo const *> const & fragmentShaderData,
                                     uint32_t vertexStride,
                                     std::vector<std::pair<vk::Format, uint32_t>> const & vertexInputAttributeFormatOffset,
                                     vk::FrontFace frontFace,
                                     bool depthBuffered,
                                     vk::PipelineLayout const & pipelineLayout,
                                     vk::RenderPass const & renderPass );
vk::Instance createInstance( std::string const & appName,
                             std::string const & engineName,
                             std::vector<std::string> const & layers = {},
                             std::vector<std::string> const & extensions = {},
                             uint32_t apiVersion = VK_API_VERSION_1_0 );
vk::RenderPass createRenderPass( vk::Device const & device,
                                 vk::Format colorFormat,
                                 vk::Format depthFormat,
                                 vk::AttachmentLoadOp loadOp = vk::AttachmentLoadOp::eClear,
                                 vk::ImageLayout colorFinalLayout = vk::ImageLayout::ePresentSrcKHR );
// Callback suitable for vk::DebugUtilsMessengerCreateInfoEXT::pfnUserCallback.
VKAPI_ATTR VkBool32 VKAPI_CALL debugUtilsMessengerCallback( VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
                                                            VkDebugUtilsMessageTypeFlagsEXT messageTypes,
                                                            VkDebugUtilsMessengerCallbackDataEXT const * pCallbackData,
                                                            void * /*pUserData*/ );
uint32_t findGraphicsQueueFamilyIndex( std::vector<vk::QueueFamilyProperties> const & queueFamilyProperties );
std::pair<uint32_t, uint32_t> findGraphicsAndPresentQueueFamilyIndex( vk::PhysicalDevice physicalDevice, vk::SurfaceKHR const & surface );
uint32_t findMemoryType( vk::PhysicalDeviceMemoryProperties const & memoryProperties, uint32_t typeBits, vk::MemoryPropertyFlags requirementsMask );
// In debug builds the requested extensions/layers are validated against the
// ones actually available, hence the extra NDEBUG-conditional parameter.
std::vector<char const *> gatherExtensions( std::vector<std::string> const & extensions
#if !defined( NDEBUG )
                                            ,
                                            std::vector<vk::ExtensionProperties> const & extensionProperties
#endif
);
std::vector<char const *> gatherLayers( std::vector<std::string> const & layers
#if !defined( NDEBUG )
                                        ,
                                        std::vector<vk::LayerProperties> const & layerProperties
#endif
);
std::vector<std::string> getDeviceExtensions();
std::vector<std::string> getInstanceExtensions();
vk::DebugUtilsMessengerCreateInfoEXT makeDebugUtilsMessengerCreateInfoEXT();
// In debug builds the instance create info carries a debug-utils messenger in its chain.
#if defined( NDEBUG )
vk::StructureChain<vk::InstanceCreateInfo>
#else
vk::StructureChain<vk::InstanceCreateInfo, vk::DebugUtilsMessengerCreateInfoEXT>
#endif
  makeInstanceCreateInfoChain( vk::ApplicationInfo const & applicationInfo,
                               std::vector<char const *> const & layers,
                               std::vector<char const *> const & extensions );
vk::Format pickDepthFormat( vk::PhysicalDevice const & physicalDevice );
vk::PresentModeKHR pickPresentMode( std::vector<vk::PresentModeKHR> const & presentModes );
vk::SurfaceFormatKHR pickSurfaceFormat( std::vector<vk::SurfaceFormatKHR> const & formats );
void submitAndWait( vk::Device const & device, vk::Queue const & queue, vk::CommandBuffer const & commandBuffer );
// Buffer descriptors are given as tuples of (type, buffer, range, buffer view);
// texture descriptors follow at 'bindingOffset' + buffer count.
void updateDescriptorSets( vk::Device const & device,
                           vk::DescriptorSet const & descriptorSet,
                           std::vector<std::tuple<vk::DescriptorType, vk::Buffer const &, vk::DeviceSize, vk::BufferView const &>> const & bufferData,
                           vk::su::TextureData const & textureData,
                           uint32_t bindingOffset = 0 );
void updateDescriptorSets( vk::Device const & device,
                           vk::DescriptorSet const & descriptorSet,
                           std::vector<std::tuple<vk::DescriptorType, vk::Buffer const &, vk::DeviceSize, vk::BufferView const &>> const & bufferData,
                           std::vector<vk::su::TextureData> const & textureData,
                           uint32_t bindingOffset = 0 );
|
2020-04-12 19:49:12 +00:00
|
|
|
|
|
|
|
} // namespace su
|
|
|
|
} // namespace vk
|
|
|
|
|
|
|
|
std::ostream & operator<<( std::ostream & os, vk::su::UUID const & uuid );
|