2019-06-25 07:47:27 +00:00
|
|
|
#pragma once
|
|
|
|
|
2019-03-05 07:59:40 +00:00
|
|
|
// Copyright(c) 2019, NVIDIA CORPORATION. All rights reserved.
|
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
// You may obtain a copy of the License at
|
|
|
|
//
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
//
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
|
|
|
//
|
|
|
|
|
|
|
|
#include "vulkan/vulkan.hpp"
|
2020-01-28 09:16:10 +00:00
|
|
|
|
|
|
|
#define GLFW_INCLUDE_NONE
|
|
|
|
#include <GLFW/glfw3.h>
|
|
|
|
|
2019-03-05 07:59:40 +00:00
|
|
|
#include <iostream>
|
2019-06-25 07:47:27 +00:00
|
|
|
#include <map>
|
2019-03-05 07:59:40 +00:00
|
|
|
|
|
|
|
namespace vk
|
|
|
|
{
|
|
|
|
namespace su
|
|
|
|
{
|
|
|
|
const uint64_t FenceTimeout = 100000000;  // fence-wait timeout (presumably nanoseconds, i.e. 0.1 s — vkWaitForFences takes ns); TODO confirm at call sites
|
|
|
|
|
2020-01-28 09:16:10 +00:00
|
|
|
// Records commands produced by 'func' into 'commandBuffer' (one-time-submit usage),
// submits the buffer on 'queue', and blocks until the queue is idle.
// NOTE: queue.waitIdle() is simple but heavyweight — fine for setup/utility work,
// not intended for per-frame use.
template <typename Func>
void oneTimeSubmit(vk::UniqueCommandBuffer const& commandBuffer, vk::Queue const& queue, Func const& func)
{
  commandBuffer->begin(vk::CommandBufferBeginInfo(vk::CommandBufferUsageFlagBits::eOneTimeSubmit));
  func(commandBuffer);
  commandBuffer->end();
  // Submit exactly this one command buffer; no wait/signal semaphores, no fence.
  queue.submit(vk::SubmitInfo(0, nullptr, nullptr, 1, &(*commandBuffer)), nullptr);
  queue.waitIdle();
}
|
|
|
|
|
|
|
|
template <typename Func>
|
|
|
|
void oneTimeSubmit(vk::UniqueDevice const& device, vk::UniqueCommandPool const& commandPool, vk::Queue const& queue, Func const& func)
|
|
|
|
{
|
|
|
|
vk::UniqueCommandBuffer commandBuffer = std::move(device->allocateCommandBuffersUnique(vk::CommandBufferAllocateInfo(*commandPool, vk::CommandBufferLevel::ePrimary, 1)).front());
|
|
|
|
oneTimeSubmit(commandBuffer, queue, func);
|
|
|
|
}
|
|
|
|
|
|
|
|
template <class T>
|
2020-03-23 13:59:37 +00:00
|
|
|
void copyToDevice(vk::UniqueDevice const& device, vk::UniqueDeviceMemory const& memory, T const* pData, size_t count, vk::DeviceSize stride = sizeof(T))
|
2020-01-28 09:16:10 +00:00
|
|
|
{
|
|
|
|
assert(sizeof(T) <= stride);
|
|
|
|
uint8_t* deviceData = static_cast<uint8_t*>(device->mapMemory(memory.get(), 0, count * stride));
|
|
|
|
if (stride == sizeof(T))
|
|
|
|
{
|
|
|
|
memcpy(deviceData, pData, count * sizeof(T));
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
for (size_t i = 0; i < count; i++)
|
|
|
|
{
|
|
|
|
memcpy(deviceData, &pData[i], sizeof(T));
|
|
|
|
deviceData += stride;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
device->unmapMemory(memory.get());
|
|
|
|
}
|
|
|
|
|
|
|
|
template <class T>
|
|
|
|
void copyToDevice(vk::UniqueDevice const& device, vk::UniqueDeviceMemory const& memory, T const& data)
|
|
|
|
{
|
|
|
|
copyToDevice<T>(device, memory, &data, 1);
|
|
|
|
}
|
|
|
|
|
|
|
|
template<class T>
|
|
|
|
VULKAN_HPP_INLINE constexpr const T& clamp(const T& v, const T& lo, const T& hi)
|
|
|
|
{
|
|
|
|
return v < lo ? lo : hi < v ? hi : v;
|
|
|
|
}
|
|
|
|
|
|
|
|
void setImageLayout(vk::UniqueCommandBuffer const &commandBuffer, vk::Image image, vk::Format format, vk::ImageLayout oldImageLayout, vk::ImageLayout newImageLayout);
|
|
|
|
|
|
|
|
// Holder for a GLFW window handle plus its name and size. Non-copyable and movable,
// suggesting unique ownership of the handle; constructor/destructor are defined
// elsewhere (presumably the destructor destroys the window — TODO confirm).
struct WindowData
{
  WindowData(GLFWwindow *wnd, std::string const& name, vk::Extent2D const& extent);
  WindowData(const WindowData &) = delete;   // non-copyable: the handle must have one owner
  WindowData(WindowData &&other);
  ~WindowData() noexcept;

  GLFWwindow *handle;    // the wrapped GLFW window
  std::string name;      // window name/title
  vk::Extent2D extent;   // window size in pixels
};
|
|
|
|
|
|
|
|
WindowData createWindow(std::string const &windowName, vk::Extent2D const &extent);
|
|
|
|
|
2019-03-26 11:24:36 +00:00
|
|
|
// Owns a vk::Buffer together with its backing device memory (allocation/binding is
// done by the constructor, defined elsewhere). In debug builds the creation
// parameters are retained so the upload() methods can assert they are used with a
// compatible buffer; in release builds both the members and the asserts compile away.
struct BufferData
{
  BufferData(vk::PhysicalDevice const& physicalDevice, vk::UniqueDevice const& device, vk::DeviceSize size, vk::BufferUsageFlags usage,
             vk::MemoryPropertyFlags propertyFlags = vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent);

  // Maps the memory, copies a single value, and unmaps. Requires host-visible,
  // host-coherent memory (no explicit flush is issued).
  template <typename DataType>
  void upload(vk::UniqueDevice const& device, DataType const& data) const
  {
    assert((m_propertyFlags & vk::MemoryPropertyFlagBits::eHostCoherent) && (m_propertyFlags & vk::MemoryPropertyFlagBits::eHostVisible));
    assert(sizeof(DataType) <= m_size);

    void* dataPtr = device->mapMemory(*this->deviceMemory, 0, sizeof(DataType));
    memcpy(dataPtr, &data, sizeof(DataType));
    device->unmapMemory(*this->deviceMemory);
  }

  // Copies a vector of elements into host-visible memory, 'stride' bytes apart.
  // stride == 0 means tightly packed (sizeof(DataType)).
  template <typename DataType>
  void upload(vk::UniqueDevice const& device, std::vector<DataType> const& data, size_t stride = 0) const
  {
    assert(m_propertyFlags & vk::MemoryPropertyFlagBits::eHostVisible);

    size_t elementSize = stride ? stride : sizeof(DataType);
    assert(sizeof(DataType) <= elementSize);

    copyToDevice(device, deviceMemory, data.data(), data.size(), elementSize);
  }

  // Uploads to a device-local buffer by staging: copies the data into a temporary
  // host-visible buffer, then records and submits a buffer-to-buffer copy on
  // 'queue' (blocking until it completes via oneTimeSubmit). Requires the buffer
  // to have been created with eTransferDst usage.
  template <typename DataType>
  void upload(vk::PhysicalDevice const& physicalDevice, vk::UniqueDevice const& device, vk::UniqueCommandPool const& commandPool, vk::Queue queue, std::vector<DataType> const& data,
              size_t stride) const
  {
    assert(m_usage & vk::BufferUsageFlagBits::eTransferDst);
    assert(m_propertyFlags & vk::MemoryPropertyFlagBits::eDeviceLocal);

    size_t elementSize = stride ? stride : sizeof(DataType);
    assert(sizeof(DataType) <= elementSize);

    size_t dataSize = data.size() * elementSize;
    assert(dataSize <= m_size);

    vk::su::BufferData stagingBuffer(physicalDevice, device, dataSize, vk::BufferUsageFlagBits::eTransferSrc);
    copyToDevice(device, stagingBuffer.deviceMemory, data.data(), data.size(), elementSize);

    vk::su::oneTimeSubmit(device, commandPool, queue,
                          [&](vk::UniqueCommandBuffer const& commandBuffer) { commandBuffer->copyBuffer(*stagingBuffer.buffer, *this->buffer, vk::BufferCopy(0, 0, dataSize)); });
  }

  vk::UniqueBuffer buffer;
  vk::UniqueDeviceMemory deviceMemory;
#if !defined(NDEBUG)
private:
  // Creation parameters, kept in debug builds only to back the asserts above.
  vk::DeviceSize m_size;
  vk::BufferUsageFlags m_usage;
  vk::MemoryPropertyFlags m_propertyFlags;
#endif
};
|
|
|
|
|
|
|
|
// Bundles an image with its backing memory, an image view, and the image format.
// The constructor (defined elsewhere) takes the usual image-creation parameters.
struct ImageData
{
  ImageData(vk::PhysicalDevice const& physicalDevice, vk::UniqueDevice const& device, vk::Format format, vk::Extent2D const& extent, vk::ImageTiling tiling, vk::ImageUsageFlags usage
            , vk::ImageLayout initialLayout, vk::MemoryPropertyFlags memoryProperties, vk::ImageAspectFlags aspectMask);

  vk::Format format;                   // format the image was created with
  vk::UniqueImage image;               // the owned image
  vk::UniqueDeviceMemory deviceMemory; // memory backing the image
  vk::UniqueImageView imageView;       // view onto the image
};
|
|
|
|
|
|
|
|
// ImageData specialization for a depth attachment; the constructor (defined
// elsewhere) presumably fills in depth-appropriate usage/aspect settings.
struct DepthBufferData : public ImageData
{
  DepthBufferData(vk::PhysicalDevice &physicalDevice, vk::UniqueDevice & device, vk::Format format, vk::Extent2D const& extent);
};
|
|
|
|
|
|
|
|
// Couples a window with the Vulkan surface created for it. Member order matters:
// 'window' is declared before 'surface', so the surface is destroyed before the
// window on teardown.
struct SurfaceData
{
  SurfaceData(vk::UniqueInstance &instance, std::string const& windowName, vk::Extent2D const& extent);

  vk::Extent2D extent;          // requested surface/window size
  WindowData window;            // the owning window wrapper
  vk::UniqueSurfaceKHR surface; // surface created for the window
};
|
|
|
|
|
|
|
|
// Owns a swapchain plus its (non-owned) images and owned per-image views.
// Accepts an old swapchain handle for recreation (e.g. on resize) and the
// graphics/present queue family indices for sharing-mode setup.
struct SwapChainData
{
  SwapChainData(vk::PhysicalDevice const& physicalDevice, vk::UniqueDevice const& device, vk::SurfaceKHR const& surface, vk::Extent2D const& extent, vk::ImageUsageFlags usage,
                vk::UniqueSwapchainKHR const& oldSwapChain, uint32_t graphicsFamilyIndex, uint32_t presentFamilyIndex);

  vk::Format colorFormat;                       // surface format chosen for the swapchain
  vk::UniqueSwapchainKHR swapChain;             // the owned swapchain
  std::vector<vk::Image> images;                // swapchain images (owned by the swapchain, not by us)
  std::vector<vk::UniqueImageView> imageViews;  // one owned view per swapchain image
};
|
|
|
|
|
2019-06-25 07:47:27 +00:00
|
|
|
class CheckerboardImageGenerator
|
2019-04-15 08:18:58 +00:00
|
|
|
{
|
2019-07-25 11:52:09 +00:00
|
|
|
public:
|
|
|
|
CheckerboardImageGenerator(std::array<uint8_t, 3> const& rgb0 = {0, 0, 0}, std::array<uint8_t, 3> const& rgb1 = {255, 255, 255});
|
|
|
|
|
2019-04-15 08:18:58 +00:00
|
|
|
void operator()(void* data, vk::Extent2D &extent) const;
|
2019-07-25 11:52:09 +00:00
|
|
|
|
|
|
|
private:
|
|
|
|
std::array<uint8_t, 3> const& m_rgb0;
|
|
|
|
std::array<uint8_t, 3> const& m_rgb1;
|
2019-04-15 08:18:58 +00:00
|
|
|
};
|
|
|
|
|
2019-06-25 07:47:27 +00:00
|
|
|
class MonochromeImageGenerator
|
2019-05-09 13:25:40 +00:00
|
|
|
{
|
2019-06-25 07:47:27 +00:00
|
|
|
public:
|
|
|
|
MonochromeImageGenerator(std::array<unsigned char, 3> const& rgb);
|
2019-05-09 13:25:40 +00:00
|
|
|
|
|
|
|
void operator()(void* data, vk::Extent2D &extent) const;
|
|
|
|
|
2019-06-25 07:47:27 +00:00
|
|
|
private:
|
|
|
|
std::array<unsigned char, 3> const& m_rgb;
|
|
|
|
};
|
|
|
|
|
|
|
|
// Image-generator functor copying caller-provided pixel data; operator() (defined
// elsewhere) writes the pixels into 'data' for the given extent.
class PixelsImageGenerator
{
public:
  PixelsImageGenerator(vk::Extent2D const& extent, size_t channels, unsigned char const* pixels);

  void operator()(void* data, vk::Extent2D & extent) const;

private:
  vk::Extent2D m_extent;         // dimensions of the source pixel data
  size_t m_channels;             // number of channels per pixel in the source data
  unsigned char const* m_pixels; // non-owning: caller must keep the data alive while this generator is used
};
|
|
|
|
|
|
|
|
|
2019-03-26 11:24:36 +00:00
|
|
|
// Texture helper owning the sampled image, an optional host-visible staging buffer
// (used when the image lives in device-local memory), and a sampler. The
// constructor (defined elsewhere) decides whether staging is needed and sets
// 'needsStaging' accordingly.
struct TextureData
{
  TextureData(vk::PhysicalDevice const& physicalDevice, vk::UniqueDevice const& device, vk::Extent2D const& extent_ = {256, 256}, vk::ImageUsageFlags usageFlags = {},
              vk::FormatFeatureFlags formatFeatureFlags = {}, bool anisotropyEnable = false, bool forceStaging = false);

  // Lets 'imageGenerator' fill the texture's pixel data, then records the layout
  // transitions (and, if staging, the buffer-to-image copy) into 'commandBuffer'.
  // NOTE: only records — the caller must submit the command buffer.
  template <typename ImageGenerator>
  void setImage(vk::UniqueDevice const& device, vk::UniqueCommandBuffer const& commandBuffer, ImageGenerator const& imageGenerator)
  {
    // Map whichever memory the generator should write into: the staging buffer if
    // one is used, otherwise the (linear-tiled, host-visible) image itself.
    void* data = needsStaging
      ? device->mapMemory(stagingBufferData->deviceMemory.get(), 0, device->getBufferMemoryRequirements(stagingBufferData->buffer.get()).size)
      : device->mapMemory(imageData->deviceMemory.get(), 0, device->getImageMemoryRequirements(imageData->image.get()).size);
    imageGenerator(data, extent);
    device->unmapMemory(needsStaging ? stagingBufferData->deviceMemory.get() : imageData->deviceMemory.get());

    if (needsStaging)
    {
      // Since we're going to blit to the texture image, set its layout to eTransferDstOptimal
      vk::su::setImageLayout(commandBuffer, imageData->image.get(), imageData->format, vk::ImageLayout::eUndefined, vk::ImageLayout::eTransferDstOptimal);
      vk::BufferImageCopy copyRegion(0, extent.width, extent.height, vk::ImageSubresourceLayers(vk::ImageAspectFlagBits::eColor, 0, 0, 1), vk::Offset3D(0, 0, 0), vk::Extent3D(extent, 1));
      commandBuffer->copyBufferToImage(stagingBufferData->buffer.get(), imageData->image.get(), vk::ImageLayout::eTransferDstOptimal, copyRegion);
      // Set the layout for the texture image from eTransferDstOptimal to SHADER_READ_ONLY
      vk::su::setImageLayout(commandBuffer, imageData->image.get(), imageData->format, vk::ImageLayout::eTransferDstOptimal, vk::ImageLayout::eShaderReadOnlyOptimal);
    }
    else
    {
      // If we can use the linear tiled image as a texture, just do it
      vk::su::setImageLayout(commandBuffer, imageData->image.get(), imageData->format, vk::ImageLayout::ePreinitialized, vk::ImageLayout::eShaderReadOnlyOptimal);
    }
  }

  vk::Format format;                           // format of the texture image
  vk::Extent2D extent;                         // texture dimensions
  bool needsStaging;                           // true if the image is device-local and uploads go through a staging buffer
  std::unique_ptr<BufferData> stagingBufferData; // staging buffer; only allocated when needsStaging is true — TODO confirm in ctor
  std::unique_ptr<ImageData> imageData;        // the texture image itself
  vk::UniqueSampler textureSampler;            // sampler used with this texture
};
|
|
|
|
|
2019-05-09 13:25:40 +00:00
|
|
|
// Thin value wrapper around a VK_UUID_SIZE-byte identifier (e.g. a device UUID);
// printable via the operator<< declared at the end of this file.
struct UUID
{
public:
  UUID(uint8_t const data[VK_UUID_SIZE]);

  uint8_t m_data[VK_UUID_SIZE];  // raw UUID bytes, copied in by the constructor — TODO confirm in its definition
};
|
|
|
|
|
2019-03-26 11:24:36 +00:00
|
|
|
|
2019-03-05 07:59:40 +00:00
|
|
|
template <typename TargetType, typename SourceType>
|
|
|
|
VULKAN_HPP_INLINE TargetType checked_cast(SourceType value)
|
|
|
|
{
|
|
|
|
static_assert(sizeof(TargetType) <= sizeof(SourceType), "No need to cast from smaller to larger type!");
|
|
|
|
static_assert(!std::numeric_limits<TargetType>::is_signed, "Only unsigned types supported!");
|
|
|
|
static_assert(!std::numeric_limits<SourceType>::is_signed, "Only unsigned types supported!");
|
|
|
|
assert(value <= std::numeric_limits<TargetType>::max());
|
|
|
|
return static_cast<TargetType>(value);
|
|
|
|
}
|
|
|
|
|
2019-05-21 13:44:52 +00:00
|
|
|
vk::UniqueDeviceMemory allocateMemory(vk::UniqueDevice const& device, vk::PhysicalDeviceMemoryProperties const& memoryProperties, vk::MemoryRequirements const& memoryRequirements,
|
|
|
|
vk::MemoryPropertyFlags memoryPropertyFlags);
|
2019-10-28 14:36:21 +00:00
|
|
|
bool contains(std::vector<vk::ExtensionProperties> const& extensionProperties, std::string const& extensionName);
|
2019-03-15 09:40:45 +00:00
|
|
|
vk::UniqueCommandPool createCommandPool(vk::UniqueDevice &device, uint32_t queueFamilyIndex);
|
2019-09-25 09:56:46 +00:00
|
|
|
vk::UniqueDebugUtilsMessengerEXT createDebugUtilsMessenger(vk::UniqueInstance &instance);
|
2019-05-21 13:44:52 +00:00
|
|
|
vk::UniqueDescriptorPool createDescriptorPool(vk::UniqueDevice &device, std::vector<vk::DescriptorPoolSize> const& poolSizes);
|
2019-06-25 07:47:27 +00:00
|
|
|
vk::UniqueDescriptorSetLayout createDescriptorSetLayout(vk::UniqueDevice const& device, std::vector<std::tuple<vk::DescriptorType, uint32_t, vk::ShaderStageFlags>> const& bindingData,
|
|
|
|
vk::DescriptorSetLayoutCreateFlags flags = {});
|
2019-05-21 13:44:52 +00:00
|
|
|
vk::UniqueDevice createDevice(vk::PhysicalDevice physicalDevice, uint32_t queueFamilyIndex, std::vector<std::string> const& extensions = {}, vk::PhysicalDeviceFeatures const* physicalDeviceFeatures = nullptr, void const* pNext = nullptr);
|
2019-04-15 08:18:58 +00:00
|
|
|
std::vector<vk::UniqueFramebuffer> createFramebuffers(vk::UniqueDevice &device, vk::UniqueRenderPass &renderPass, std::vector<vk::UniqueImageView> const& imageViews, vk::UniqueImageView const& depthImageView, vk::Extent2D const& extent);
|
2019-06-25 07:47:27 +00:00
|
|
|
vk::UniquePipeline createGraphicsPipeline(vk::UniqueDevice const& device, vk::UniquePipelineCache const& pipelineCache,
|
|
|
|
std::pair<vk::ShaderModule, vk::SpecializationInfo const*> const& vertexShaderData,
|
|
|
|
std::pair<vk::ShaderModule, vk::SpecializationInfo const*> const& fragmentShaderData, uint32_t vertexStride,
|
|
|
|
std::vector<std::pair<vk::Format, uint32_t>> const& vertexInputAttributeFormatOffset, vk::FrontFace frontFace, bool depthBuffered,
|
|
|
|
vk::UniquePipelineLayout const& pipelineLayout, vk::UniqueRenderPass const& renderPass);
|
2019-07-09 07:25:48 +00:00
|
|
|
vk::UniqueInstance createInstance(std::string const& appName, std::string const& engineName, std::vector<std::string> const& layers = {}, std::vector<std::string> const& extensions = {},
|
|
|
|
uint32_t apiVersion = VK_API_VERSION_1_0);
|
2019-04-15 08:18:58 +00:00
|
|
|
vk::UniqueRenderPass createRenderPass(vk::UniqueDevice &device, vk::Format colorFormat, vk::Format depthFormat, vk::AttachmentLoadOp loadOp = vk::AttachmentLoadOp::eClear, vk::ImageLayout colorFinalLayout = vk::ImageLayout::ePresentSrcKHR);
|
2020-03-19 18:56:18 +00:00
|
|
|
VKAPI_ATTR VkBool32 VKAPI_CALL debugUtilsMessengerCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, VkDebugUtilsMessengerCallbackDataEXT const * pCallbackData, void * /*pUserData*/);
|
2019-03-05 07:59:40 +00:00
|
|
|
uint32_t findGraphicsQueueFamilyIndex(std::vector<vk::QueueFamilyProperties> const& queueFamilyProperties);
|
2019-05-21 13:44:52 +00:00
|
|
|
std::pair<uint32_t, uint32_t> findGraphicsAndPresentQueueFamilyIndex(vk::PhysicalDevice physicalDevice, vk::SurfaceKHR const& surface);
|
2019-03-15 09:40:45 +00:00
|
|
|
uint32_t findMemoryType(vk::PhysicalDeviceMemoryProperties const& memoryProperties, uint32_t typeBits, vk::MemoryPropertyFlags requirementsMask);
|
2019-03-05 07:59:40 +00:00
|
|
|
std::vector<std::string> getDeviceExtensions();
|
|
|
|
std::vector<std::string> getInstanceExtensions();
|
2019-06-25 07:47:27 +00:00
|
|
|
vk::Format pickDepthFormat(vk::PhysicalDevice const& physicalDevice);
|
2019-05-21 13:44:52 +00:00
|
|
|
vk::PresentModeKHR pickPresentMode(std::vector<vk::PresentModeKHR> const& presentModes);
|
|
|
|
vk::SurfaceFormatKHR pickSurfaceFormat(std::vector<vk::SurfaceFormatKHR> const& formats);
|
2019-03-15 09:40:45 +00:00
|
|
|
void submitAndWait(vk::UniqueDevice &device, vk::Queue queue, vk::UniqueCommandBuffer &commandBuffer);
|
2019-07-03 07:23:56 +00:00
|
|
|
void updateDescriptorSets(vk::UniqueDevice const& device, vk::UniqueDescriptorSet const& descriptorSet,
|
2019-07-25 11:52:09 +00:00
|
|
|
std::vector<std::tuple<vk::DescriptorType, vk::UniqueBuffer const&, vk::UniqueBufferView const&>> const& bufferData, vk::su::TextureData const& textureData,
|
|
|
|
uint32_t bindingOffset = 0);
|
2019-07-03 07:23:56 +00:00
|
|
|
void updateDescriptorSets(vk::UniqueDevice const& device, vk::UniqueDescriptorSet const& descriptorSet,
|
|
|
|
std::vector<std::tuple<vk::DescriptorType, vk::UniqueBuffer const&, vk::UniqueBufferView const&>> const& bufferData,
|
2019-07-25 11:52:09 +00:00
|
|
|
std::vector<vk::su::TextureData> const& textureData, uint32_t bindingOffset = 0);
|
2019-03-05 07:59:40 +00:00
|
|
|
|
|
|
|
}
|
|
|
|
}
|
2019-05-09 13:25:40 +00:00
|
|
|
|
2019-06-25 07:47:27 +00:00
|
|
|
std::ostream& operator<<(std::ostream& os, vk::su::UUID const& uuid);
|